build: update README to indicate that source has moved and delete all files (#893)
Towards https://github.com/googleapis/google-cloud-python/issues/10892.
diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index 34417c3..0000000
--- a/.coveragerc
+++ /dev/null
@@ -1,15 +0,0 @@
-[run]
-branch = True
-
-[report]
-fail_under = 100
-show_missing = True
-exclude_lines =
- # Re-enable the standard pragma
- pragma: NO COVER
- # Ignore debug-only repr
- def __repr__
- # Ignore abstract methods
- raise NotImplementedError
- # Ignore coverage for code specific to static type checkers
- TYPE_CHECKING
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 3da787c..0000000
--- a/.flake8
+++ /dev/null
@@ -1,11 +0,0 @@
-[flake8]
-import-order-style=google
-# Note: this forces all google imports to be in the third group. See
-# https://github.com/PyCQA/flake8-import-order/issues/111
-application-import-names=google
-ignore = E203, E266, E501, W503
-exclude =
- __pycache__,
- .git,
- *.pyc,
- conf.py
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
deleted file mode 100644
index 8272466..0000000
--- a/.github/CODEOWNERS
+++ /dev/null
@@ -1,11 +0,0 @@
-# Code owners file.
-# This file controls who is tagged for review for any given pull request.
-#
-# For syntax help see:
-# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
-# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-
-* @googleapis/cloud-sdk-python-team
-
-# @googleapis/python-samples-reviewers @googleapis/actools-python are the default owners for samples changes
-/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-sdk-python-team
\ No newline at end of file
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
deleted file mode 100644
index 939e534..0000000
--- a/.github/CONTRIBUTING.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# How to Contribute
-
-We'd love to accept your patches and contributions to this project. There are
-just a few small guidelines you need to follow.
-
-## Contributor License Agreement
-
-Contributions to this project must be accompanied by a Contributor License
-Agreement. You (or your employer) retain the copyright to your contribution;
-this simply gives us permission to use and redistribute your contributions as
-part of the project. Head over to <https://cla.developers.google.com/> to see
-your current agreements on file or to sign a new one.
-
-You generally only need to submit a CLA once, so if you've already submitted one
-(even if it was for a different project), you probably don't need to do it
-again.
-
-## Code reviews
-
-All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult
-[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
-information on using pull requests.
-
-## Community Guidelines
-
-This project follows [Google's Open Source Community
-Guidelines](https://opensource.google.com/conduct/).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index ee19f10..0000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-
----
-
-Thanks for stopping by to let us know something could be better!
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
-
-Please run down the following list and make sure you've tried the usual "quick fixes":
-
- - Search the issues already opened: https://github.com/googleapis/python-api-core/issues
- - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
-
-If you are still having issues, please be sure to include as much information as possible:
-
-#### Environment details
-
- - OS type and version:
- - Python version: `python --version`
- - pip version: `pip --version`
- - `google-api-core` version: `pip show google-api-core`
-
-#### Steps to reproduce
-
- 1. ?
- 2. ?
-
-#### Code example
-
-```python
-# example
-```
-
-#### Stack trace
-```
-# example
-```
-
-Making sure to follow these steps will guarantee the quickest resolution possible.
-
-Thanks!
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 6365857..0000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,18 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this library
-
----
-
-Thanks for stopping by to let us know something could be better!
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
-
- **Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
- **Describe the solution you'd like**
-A clear and concise description of what you want to happen.
- **Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
- **Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
deleted file mode 100644
index 9958690..0000000
--- a/.github/ISSUE_TEMPLATE/support_request.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-name: Support request
-about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
-
----
-
-**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
deleted file mode 100644
index 4ca8093..0000000
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ /dev/null
@@ -1,7 +0,0 @@
-Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
-- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-api-core/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
-- [ ] Ensure the tests and linter pass
-- [ ] Code coverage does not decrease (if any source code was changed)
-- [ ] Appropriate docs were updated (if necessary)
-
-Fixes #<issue_number_goes_here> 🦕
diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml
deleted file mode 100644
index 21786a4..0000000
--- a/.github/auto-label.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-requestsize:
- enabled: true
-
-path:
- pullrequest: true
- paths:
- samples: "samples"
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
deleted file mode 100644
index 1618464..0000000
--- a/.github/blunderbuss.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Blunderbuss config
-#
-# This file controls who is assigned for pull requests and issues.
-# Note: This file is autogenerated. To make changes to the assignee
-# team, please update `codeowner_team` in `.repo-metadata.json`.
-assign_issues:
- - googleapis/actools-python
-
-assign_issues_by:
- - labels:
- - "samples"
- to:
- - googleapis/python-samples-reviewers
- - googleapis/actools-python
-
-assign_prs:
- - googleapis/actools-python
diff --git a/.github/cherry-pick-bot.yml b/.github/cherry-pick-bot.yml
deleted file mode 100644
index 1e9cfcd..0000000
--- a/.github/cherry-pick-bot.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-enabled: true
-
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
deleted file mode 100644
index 6fe78aa..0000000
--- a/.github/header-checker-lint.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-{"allowedCopyrightHolders": ["Google LLC"],
- "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
- "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
- "sourceFileExtensions": [
- "ts",
- "js",
- "java",
- "sh",
- "Dockerfile",
- "yaml",
- "py",
- "html",
- "txt"
- ]
-}
\ No newline at end of file
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
deleted file mode 100644
index e69de29..0000000
--- a/.github/snippet-bot.yml
+++ /dev/null
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
deleted file mode 100644
index 5cccd5b..0000000
--- a/.github/workflows/docs.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-on:
- pull_request:
- branches:
- - main
-
-permissions:
- contents: read
-
-name: docs
-jobs:
- docs:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
- - name: Setup Python
- uses: actions/setup-python@v6
- with:
- python-version: "3.10"
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run docs
- run: |
- nox -s docs
- docfx:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
- - name: Setup Python
- uses: actions/setup-python@v6
- with:
- python-version: "3.10"
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run docfx
- run: |
- nox -s docfx
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
deleted file mode 100644
index 2975fdd..0000000
--- a/.github/workflows/lint.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-on:
- pull_request:
- branches:
- - main
-
-permissions:
- contents: read
-
-name: lint
-jobs:
- lint:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
- - name: Setup Python
- uses: actions/setup-python@v6
- with:
- python-version: "3.14"
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run lint
- run: |
- nox -s lint
- - name: Run lint_setup_py
- run: |
- nox -s lint_setup_py
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
deleted file mode 100644
index e197693..0000000
--- a/.github/workflows/mypy.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-on:
- pull_request:
- branches:
- - main
-
-permissions:
- contents: read
-
-name: mypy
-jobs:
- mypy:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
- - name: Setup Python
- uses: actions/setup-python@v6
- with:
- python-version: "3.14"
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run mypy
- run: |
- nox -s mypy
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
deleted file mode 100644
index 56b0557..0000000
--- a/.github/workflows/unittest.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-name: "Unit tests"
-
-on:
- pull_request:
- branches:
- - main
-
-permissions:
- contents: read
-
-jobs:
- unit-prerelease:
- name: prerelease_deps
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python: ["3.14"]
- option: ["prerelease"]
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Setup Python
- uses: actions/setup-python@v5
- with:
- python-version: ${{ matrix.python }}
- allow-prereleases: true
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run ${{ matrix.option }} tests
- env:
- COVERAGE_FILE: .coverage${{ matrix.option }}-${{matrix.python }}
- run: |
- nox -s prerelease_deps
- unit:
- name: unit${{ matrix.option }}-${{ matrix.python }}
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python:
- - "3.9"
- - "3.10"
- - "3.11"
- - "3.12"
- - "3.13"
- - "3.14"
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Setup Python
- uses: actions/setup-python@v5
- with:
- python-version: ${{ matrix.python }}
- allow-prereleases: true
- - name: Install nox
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install nox
- - name: Run unit tests
- env:
- COVERAGE_FILE: .coverage-${{matrix.python }}
- run: |
- nox -s unit-${{ matrix.python }}
- - name: Upload coverage results
- uses: actions/upload-artifact@v4
- with:
- name: coverage-artifact-${{ matrix.python }}
- path: .coverage-${{ matrix.python }}
- include-hidden-files: true
-
- report-coverage:
- name: cover
- runs-on: ubuntu-latest
- needs:
- - unit
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Setup Python
- uses: actions/setup-python@v5
- with:
- python-version: "3.14"
- - name: Install coverage
- run: |
- python -m pip install --upgrade setuptools pip wheel
- python -m pip install coverage
- - name: Download coverage results
- uses: actions/download-artifact@v4
- with:
- path: .coverage-results/
- - name: Report coverage results
- run: |
- find .coverage-results -type f -name '*.zip' -exec unzip {} \;
- coverage combine .coverage-results/**/.coverage*
- coverage report --show-missing --fail-under=100
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 168b201..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,65 +0,0 @@
-*.py[cod]
-*.sw[op]
-
-# C extensions
-*.so
-
-# Packages
-*.egg
-*.egg-info
-dist
-build
-eggs
-.eggs
-parts
-bin
-var
-sdist
-develop-eggs
-.installed.cfg
-lib
-lib64
-__pycache__
-
-# Installer logs
-pip-log.txt
-
-# Unit test / coverage reports
-.coverage
-.nox
-.cache
-.pytest_cache
-.pytype
-
-
-# Mac
-.DS_Store
-
-# JetBrains
-.idea
-
-# VS Code
-.vscode
-
-# emacs
-*~
-
-# Built documentation
-docs/_build
-bigquery/docs/generated
-docs.metadata
-
-# Virtual environment
-env/
-venv/
-
-# Test logs
-coverage.xml
-*sponge_log.xml
-
-# System test environment variables.
-system_tests/local_test_setup
-
-# Make sure a generated file isn't accidentally committed.
-pylintrc
-pylintrc.test
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
deleted file mode 100755
index d41b45a..0000000
--- a/.kokoro/build.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
-
-if [[ -z "${PROJECT_ROOT:-}" ]]; then
- PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
-fi
-
-pushd "${PROJECT_ROOT}"
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Setup service account credentials.
-if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
-then
- export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
-fi
-
-# Setup project id.
-if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
-then
- export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
-fi
-
-# If this is a continuous build, send the test log to the FlakyBot.
-# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
-if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
- cleanup() {
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
- }
- trap cleanup EXIT HUP
-fi
-
-# If NOX_SESSION is set, it only runs the specified session,
-# otherwise run all the sessions.
-if [[ -n "${NOX_SESSION:-}" ]]; then
- python3 -m nox -s ${NOX_SESSION:-}
-else
- python3 -m nox
-fi
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
deleted file mode 100644
index 9f2fa73..0000000
--- a/.kokoro/continuous/common.cfg
+++ /dev/null
@@ -1,27 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/build.sh"
-}
diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg
deleted file mode 100644
index 8f43917..0000000
--- a/.kokoro/continuous/continuous.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
deleted file mode 100644
index 3595fb4..0000000
--- a/.kokoro/continuous/prerelease-deps.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Only run this nox session.
-env_vars: {
- key: "NOX_SESSION"
- value: "prerelease_deps"
-}
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
deleted file mode 100755
index c435402..0000000
--- a/.kokoro/populate-secrets.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
-function msg { println "$*" >&2 ;}
-function println { printf '%s\n' "$(now) $*" ;}
-
-
-# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
-# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
-SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
-msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
-mkdir -p ${SECRET_LOCATION}
-for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
-do
- msg "Retrieving secret ${key}"
- docker run --entrypoint=gcloud \
- --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
- gcr.io/google.com/cloudsdktool/cloud-sdk \
- secrets versions access latest \
- --project cloud-devrel-kokoro-resources \
- --secret ${key} > \
- "${SECRET_LOCATION}/${key}"
- if [[ $? == 0 ]]; then
- msg "Secret written to ${SECRET_LOCATION}/${key}"
- else
- msg "Error retrieving secret ${key}"
- fi
-done
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
deleted file mode 100644
index 9f2fa73..0000000
--- a/.kokoro/presubmit/common.cfg
+++ /dev/null
@@ -1,27 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/build.sh"
-}
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
deleted file mode 100644
index 3595fb4..0000000
--- a/.kokoro/presubmit/prerelease-deps.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Only run this nox session.
-env_vars: {
- key: "NOX_SESSION"
- value: "prerelease_deps"
-}
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
deleted file mode 100644
index 8f43917..0000000
--- a/.kokoro/presubmit/presubmit.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
deleted file mode 100644
index 1a2b87b..0000000
--- a/.kokoro/samples/lint/common.cfg
+++ /dev/null
@@ -1,34 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "lint"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/lint/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
deleted file mode 100644
index 50fec96..0000000
--- a/.kokoro/samples/lint/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/lint/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
deleted file mode 100644
index 40fb8d8..0000000
--- a/.kokoro/samples/python3.10/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.10"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-310"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.10/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.10/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.10/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.10/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg
deleted file mode 100644
index d3597f0..0000000
--- a/.kokoro/samples/python3.11/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.11"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-311"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.11/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.11/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.11/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.11/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
deleted file mode 100644
index 8a5840a..0000000
--- a/.kokoro/samples/python3.12/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.12"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-312"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.12/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.12/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.12/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.12/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
deleted file mode 100644
index 2a4199f..0000000
--- a/.kokoro/samples/python3.13/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.13"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-313"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.13/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.13/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.13/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.13/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.14/common.cfg b/.kokoro/samples/python3.14/common.cfg
deleted file mode 100644
index a083385..0000000
--- a/.kokoro/samples/python3.14/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.14"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-314"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.14/continuous.cfg b/.kokoro/samples/python3.14/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.14/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.14/periodic-head.cfg b/.kokoro/samples/python3.14/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.14/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.14/periodic.cfg b/.kokoro/samples/python3.14/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.14/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.14/presubmit.cfg b/.kokoro/samples/python3.14/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.14/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
deleted file mode 100644
index 234887c..0000000
--- a/.kokoro/samples/python3.9/common.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Specify which tests to run
-env_vars: {
- key: "RUN_TESTS_SESSION"
- value: "py-3.9"
-}
-
-# Declare build specific Cloud project.
-env_vars: {
- key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-py39"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples.sh"
-}
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
-}
-
-# Download secrets for samples
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.9/continuous.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
deleted file mode 100644
index a18c0cf..0000000
--- a/.kokoro/samples/python3.9/periodic-head.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
-}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
deleted file mode 100644
index 71cd1e5..0000000
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "False"
-}
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
deleted file mode 100644
index a1c8d97..0000000
--- a/.kokoro/samples/python3.9/presubmit.cfg
+++ /dev/null
@@ -1,6 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
deleted file mode 100755
index e9d8bd7..0000000
--- a/.kokoro/test-samples-against-head.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# A customized test runner for samples.
-#
-# For periodic builds, you can specify this file for testing against head.
-
-# `-e` enables the script to automatically fail when a command fails
-# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
-set -eo pipefail
-# Enables `**` to include files nested inside sub-folders
-shopt -s globstar
-
-exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
deleted file mode 100755
index 55910c8..0000000
--- a/.kokoro/test-samples-impl.sh
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# `-e` enables the script to automatically fail when a command fails
-# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
-set -eo pipefail
-# Enables `**` to include files nested inside sub-folders
-shopt -s globstar
-
-# Exit early if samples don't exist
-if ! find samples -name 'requirements.txt' | grep -q .; then
- echo "No tests run. './samples/**/requirements.txt' not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.9 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.9 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
- fi
-
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
deleted file mode 100755
index 7933d82..0000000
--- a/.kokoro/test-samples.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The default test runner for samples.
-#
-# For periodic builds, we rewinds the repo to the latest release, and
-# run test-samples-impl.sh.
-
-# `-e` enables the script to automatically fail when a command fails
-# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
-set -eo pipefail
-# Enables `**` to include files nested inside sub-folders
-shopt -s globstar
-
-# Run periodic samples tests at latest release
-if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- # preserving the test runner implementation.
- cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
- echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
- echo "Now we rewind the repo back to the latest release..."
- LATEST_RELEASE=$(git describe --abbrev=0 --tags)
- git checkout $LATEST_RELEASE
- echo "The current head is: "
- echo $(git rev-parse --verify HEAD)
- echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
- # move back the test runner implementation if there's no file.
- if [ ! -f .kokoro/test-samples-impl.sh ]; then
- cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
- fi
-fi
-
-exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
deleted file mode 100755
index 48f7969..0000000
--- a/.kokoro/trampoline.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Always run the cleanup script, regardless of the success of bouncing into
-# the container.
-function cleanup() {
- chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
- ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
- echo "cleanup";
-}
-trap cleanup EXIT
-
-$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
deleted file mode 100755
index d03f92d..0000000
--- a/.kokoro/trampoline_v2.sh
+++ /dev/null
@@ -1,487 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# trampoline_v2.sh
-#
-# This script does 3 things.
-#
-# 1. Prepare the Docker image for the test
-# 2. Run the Docker with appropriate flags to run the test
-# 3. Upload the newly built Docker image
-#
-# in a way that is somewhat compatible with trampoline_v1.
-#
-# To run this script, first download few files from gcs to /dev/shm.
-# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
-#
-# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
-# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
-#
-# Then run the script.
-# .kokoro/trampoline_v2.sh
-#
-# These environment variables are required:
-# TRAMPOLINE_IMAGE: The docker image to use.
-# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
-#
-# You can optionally change these environment variables:
-# TRAMPOLINE_IMAGE_UPLOAD:
-# (true|false): Whether to upload the Docker image after the
-# successful builds.
-# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
-# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
-# Defaults to /workspace.
-# Potentially there are some repo specific envvars in .trampolinerc in
-# the project root.
-
-
-set -euo pipefail
-
-TRAMPOLINE_VERSION="2.0.5"
-
-if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
- readonly IO_COLOR_RED="$(tput setaf 1)"
- readonly IO_COLOR_GREEN="$(tput setaf 2)"
- readonly IO_COLOR_YELLOW="$(tput setaf 3)"
- readonly IO_COLOR_RESET="$(tput sgr0)"
-else
- readonly IO_COLOR_RED=""
- readonly IO_COLOR_GREEN=""
- readonly IO_COLOR_YELLOW=""
- readonly IO_COLOR_RESET=""
-fi
-
-function function_exists {
- [ $(LC_ALL=C type -t $1)"" == "function" ]
-}
-
-# Logs a message using the given color. The first argument must be one
-# of the IO_COLOR_* variables defined above, such as
-# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
-# given color. The log message will also have an RFC-3339 timestamp
-# prepended (in UTC). You can disable the color output by setting
-# TERM=vt100.
-function log_impl() {
- local color="$1"
- shift
- local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
- echo "================================================================"
- echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
- echo "================================================================"
-}
-
-# Logs the given message with normal coloring and a timestamp.
-function log() {
- log_impl "${IO_COLOR_RESET}" "$@"
-}
-
-# Logs the given message in green with a timestamp.
-function log_green() {
- log_impl "${IO_COLOR_GREEN}" "$@"
-}
-
-# Logs the given message in yellow with a timestamp.
-function log_yellow() {
- log_impl "${IO_COLOR_YELLOW}" "$@"
-}
-
-# Logs the given message in red with a timestamp.
-function log_red() {
- log_impl "${IO_COLOR_RED}" "$@"
-}
-
-readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
-readonly tmphome="${tmpdir}/h"
-mkdir -p "${tmphome}"
-
-function cleanup() {
- rm -rf "${tmpdir}"
-}
-trap cleanup EXIT
-
-RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
-
-# The workspace in the container, defaults to /workspace.
-TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
-
-pass_down_envvars=(
- # TRAMPOLINE_V2 variables.
- # Tells scripts whether they are running as part of CI or not.
- "RUNNING_IN_CI"
- # Indicates which CI system we're in.
- "TRAMPOLINE_CI"
- # Indicates the version of the script.
- "TRAMPOLINE_VERSION"
-)
-
-log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
-
-# Detect which CI systems we're in. If we're in any of the CI systems
-# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
-# the name of the CI system. Both envvars will be passing down to the
-# container for telling which CI system we're in.
-if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
- # descriptive env var for indicating it's on CI.
- RUNNING_IN_CI="true"
- TRAMPOLINE_CI="kokoro"
- if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
- if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
- log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
- exit 1
- fi
- # This service account will be activated later.
- TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
- else
- if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
- gcloud auth list
- fi
- log_yellow "Configuring Container Registry access"
- gcloud auth configure-docker --quiet
- fi
- pass_down_envvars+=(
- # KOKORO dynamic variables.
- "KOKORO_BUILD_NUMBER"
- "KOKORO_BUILD_ID"
- "KOKORO_JOB_NAME"
- "KOKORO_GIT_COMMIT"
- "KOKORO_GITHUB_COMMIT"
- "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
- "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For FlakyBot
- "KOKORO_GITHUB_COMMIT_URL"
- "KOKORO_GITHUB_PULL_REQUEST_URL"
- )
-elif [[ "${TRAVIS:-}" == "true" ]]; then
- RUNNING_IN_CI="true"
- TRAMPOLINE_CI="travis"
- pass_down_envvars+=(
- "TRAVIS_BRANCH"
- "TRAVIS_BUILD_ID"
- "TRAVIS_BUILD_NUMBER"
- "TRAVIS_BUILD_WEB_URL"
- "TRAVIS_COMMIT"
- "TRAVIS_COMMIT_MESSAGE"
- "TRAVIS_COMMIT_RANGE"
- "TRAVIS_JOB_NAME"
- "TRAVIS_JOB_NUMBER"
- "TRAVIS_JOB_WEB_URL"
- "TRAVIS_PULL_REQUEST"
- "TRAVIS_PULL_REQUEST_BRANCH"
- "TRAVIS_PULL_REQUEST_SHA"
- "TRAVIS_PULL_REQUEST_SLUG"
- "TRAVIS_REPO_SLUG"
- "TRAVIS_SECURE_ENV_VARS"
- "TRAVIS_TAG"
- )
-elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
- RUNNING_IN_CI="true"
- TRAMPOLINE_CI="github-workflow"
- pass_down_envvars+=(
- "GITHUB_WORKFLOW"
- "GITHUB_RUN_ID"
- "GITHUB_RUN_NUMBER"
- "GITHUB_ACTION"
- "GITHUB_ACTIONS"
- "GITHUB_ACTOR"
- "GITHUB_REPOSITORY"
- "GITHUB_EVENT_NAME"
- "GITHUB_EVENT_PATH"
- "GITHUB_SHA"
- "GITHUB_REF"
- "GITHUB_HEAD_REF"
- "GITHUB_BASE_REF"
- )
-elif [[ "${CIRCLECI:-}" == "true" ]]; then
- RUNNING_IN_CI="true"
- TRAMPOLINE_CI="circleci"
- pass_down_envvars+=(
- "CIRCLE_BRANCH"
- "CIRCLE_BUILD_NUM"
- "CIRCLE_BUILD_URL"
- "CIRCLE_COMPARE_URL"
- "CIRCLE_JOB"
- "CIRCLE_NODE_INDEX"
- "CIRCLE_NODE_TOTAL"
- "CIRCLE_PREVIOUS_BUILD_NUM"
- "CIRCLE_PROJECT_REPONAME"
- "CIRCLE_PROJECT_USERNAME"
- "CIRCLE_REPOSITORY_URL"
- "CIRCLE_SHA1"
- "CIRCLE_STAGE"
- "CIRCLE_USERNAME"
- "CIRCLE_WORKFLOW_ID"
- "CIRCLE_WORKFLOW_JOB_ID"
- "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
- "CIRCLE_WORKFLOW_WORKSPACE_ID"
- )
-fi
-
-# Configure the service account for pulling the docker image.
-function repo_root() {
- local dir="$1"
- while [[ ! -d "${dir}/.git" ]]; do
- dir="$(dirname "$dir")"
- done
- echo "${dir}"
-}
-
-# Detect the project root. In CI builds, we assume the script is in
-# the git tree and traverse from there, otherwise, traverse from `pwd`
-# to find `.git` directory.
-if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
- PROGRAM_PATH="$(realpath "$0")"
- PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
- PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
-else
- PROJECT_ROOT="$(repo_root $(pwd))"
-fi
-
-log_yellow "Changing to the project root: ${PROJECT_ROOT}."
-cd "${PROJECT_ROOT}"
-
-# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
-# to use this environment variable in `PROJECT_ROOT`.
-if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
-
- mkdir -p "${tmpdir}/gcloud"
- gcloud_config_dir="${tmpdir}/gcloud"
-
- log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
- export CLOUDSDK_CONFIG="${gcloud_config_dir}"
-
- log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
- gcloud auth activate-service-account \
- --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
- log_yellow "Configuring Container Registry access"
- gcloud auth configure-docker --quiet
-fi
-
-required_envvars=(
- # The basic trampoline configurations.
- "TRAMPOLINE_IMAGE"
- "TRAMPOLINE_BUILD_FILE"
-)
-
-if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
- source "${PROJECT_ROOT}/.trampolinerc"
-fi
-
-log_yellow "Checking environment variables."
-for e in "${required_envvars[@]}"
-do
- if [[ -z "${!e:-}" ]]; then
- log "Missing ${e} env var. Aborting."
- exit 1
- fi
-done
-
-# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
-# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
-TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
-log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
-
-# ignore error on docker operations and test execution
-set +e
-
-log_yellow "Preparing Docker image."
-# We only download the docker image in CI builds.
-if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
- # Download the docker image specified by `TRAMPOLINE_IMAGE`
-
- # We may want to add --max-concurrent-downloads flag.
-
- log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
- if docker pull "${TRAMPOLINE_IMAGE}"; then
- log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
- has_image="true"
- else
- log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
- has_image="false"
- fi
-else
- # For local run, check if we have the image.
- if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
- has_image="true"
- else
- has_image="false"
- fi
-fi
-
-
-# The default user for a Docker container has uid 0 (root). To avoid
-# creating root-owned files in the build directory we tell docker to
-# use the current user ID.
-user_uid="$(id -u)"
-user_gid="$(id -g)"
-user_name="$(id -un)"
-
-# To allow docker in docker, we add the user to the docker group in
-# the host os.
-docker_gid=$(cut -d: -f3 < <(getent group docker))
-
-update_cache="false"
-if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
- # Build the Docker image from the source.
- context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
- docker_build_flags=(
- "-f" "${TRAMPOLINE_DOCKERFILE}"
- "-t" "${TRAMPOLINE_IMAGE}"
- "--build-arg" "UID=${user_uid}"
- "--build-arg" "USERNAME=${user_name}"
- )
- if [[ "${has_image}" == "true" ]]; then
- docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
- fi
-
- log_yellow "Start building the docker image."
- if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
- echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
- fi
-
- # ON CI systems, we want to suppress docker build logs, only
- # output the logs when it fails.
- if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
- if docker build "${docker_build_flags[@]}" "${context_dir}" \
- > "${tmpdir}/docker_build.log" 2>&1; then
- if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
- cat "${tmpdir}/docker_build.log"
- fi
-
- log_green "Finished building the docker image."
- update_cache="true"
- else
- log_red "Failed to build the Docker image, aborting."
- log_yellow "Dumping the build logs:"
- cat "${tmpdir}/docker_build.log"
- exit 1
- fi
- else
- if docker build "${docker_build_flags[@]}" "${context_dir}"; then
- log_green "Finished building the docker image."
- update_cache="true"
- else
- log_red "Failed to build the Docker image, aborting."
- exit 1
- fi
- fi
-else
- if [[ "${has_image}" != "true" ]]; then
- log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
- exit 1
- fi
-fi
-
-# We use an array for the flags so they are easier to document.
-docker_flags=(
- # Remove the container after it exists.
- "--rm"
-
- # Use the host network.
- "--network=host"
-
- # Run in priviledged mode. We are not using docker for sandboxing or
- # isolation, just for packaging our dev tools.
- "--privileged"
-
- # Run the docker script with the user id. Because the docker image gets to
- # write in ${PWD} you typically want this to be your user id.
- # To allow docker in docker, we need to use docker gid on the host.
- "--user" "${user_uid}:${docker_gid}"
-
- # Pass down the USER.
- "--env" "USER=${user_name}"
-
- # Mount the project directory inside the Docker container.
- "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
- "--workdir" "${TRAMPOLINE_WORKSPACE}"
- "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
-
- # Mount the temporary home directory.
- "--volume" "${tmphome}:/h"
- "--env" "HOME=/h"
-
- # Allow docker in docker.
- "--volume" "/var/run/docker.sock:/var/run/docker.sock"
-
- # Mount the /tmp so that docker in docker can mount the files
- # there correctly.
- "--volume" "/tmp:/tmp"
- # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
- # TODO(tmatsuo): This part is not portable.
- "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
- "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
- "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
- "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
- "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
-)
-
-# Add an option for nicer output if the build gets a tty.
-if [[ -t 0 ]]; then
- docker_flags+=("-it")
-fi
-
-# Passing down env vars
-for e in "${pass_down_envvars[@]}"
-do
- if [[ -n "${!e:-}" ]]; then
- docker_flags+=("--env" "${e}=${!e}")
- fi
-done
-
-# If arguments are given, all arguments will become the commands run
-# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
-if [[ $# -ge 1 ]]; then
- log_yellow "Running the given commands '" "${@:1}" "' in the container."
- readonly commands=("${@:1}")
- if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
- echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
- fi
- docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
-else
- log_yellow "Running the tests in a Docker container."
- docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
- if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
- echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
- fi
- docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
-fi
-
-
-test_retval=$?
-
-if [[ ${test_retval} -eq 0 ]]; then
- log_green "Build finished with ${test_retval}"
-else
- log_red "Build finished with ${test_retval}"
-fi
-
-# Only upload it when the test passes.
-if [[ "${update_cache}" == "true" ]] && \
- [[ $test_retval == 0 ]] && \
- [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
- log_yellow "Uploading the Docker image."
- if docker push "${TRAMPOLINE_IMAGE}"; then
- log_green "Finished uploading the Docker image."
- else
- log_red "Failed uploading the Docker image."
- fi
- # Call trampoline_after_upload_hook if it's defined.
- if function_exists trampoline_after_upload_hook; then
- trampoline_after_upload_hook
- fi
-
-fi
-
-exit "${test_retval}"
diff --git a/.librarian/state.yaml b/.librarian/state.yaml
deleted file mode 100644
index e6e2940..0000000
--- a/.librarian/state.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:c8612d3fffb3f6a32353b2d1abd16b61e87811866f7ec9d65b59b02eb452a620
-libraries:
- - id: google-api-core
- version: 2.29.0
- last_generated_commit: ""
- apis: []
- source_roots:
- - .
- preserve_regex: []
- remove_regex: []
- tag_format: v{version}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
deleted file mode 100644
index 1d74695..0000000
--- a/.pre-commit-config.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.0.1
- hooks:
- - id: trailing-whitespace
- - id: end-of-file-fixer
- - id: check-yaml
-- repo: https://github.com/psf/black
- rev: 23.7.0
- hooks:
- - id: black
-- repo: https://github.com/pycqa/flake8
- rev: 6.1.0
- hooks:
- - id: flake8
diff --git a/.trampolinerc b/.trampolinerc
deleted file mode 100644
index 0080152..0000000
--- a/.trampolinerc
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Add required env vars here.
-required_envvars+=(
-)
-
-# Add env vars which are passed down into the container here.
-pass_down_envvars+=(
- "NOX_SESSION"
- ###############
- # Docs builds
- ###############
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
- ##################
- # Samples builds
- ##################
- "INSTALL_LIBRARY_FROM_SOURCE"
- "RUN_TESTS_SESSION"
- "BUILD_SPECIFIC_GCLOUD_PROJECT"
- # Target directories.
- "RUN_TESTS_DIRS"
- # The nox session to run.
- "RUN_TESTS_SESSION"
-)
-
-# Prevent unintentional override on the default image.
-if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
- [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
- echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
- exit 1
-fi
-
-# Define the default value if it makes sense.
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
- TRAMPOLINE_IMAGE_UPLOAD=""
-fi
-
-if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
- TRAMPOLINE_IMAGE=""
-fi
-
-if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
- TRAMPOLINE_DOCKERFILE=""
-fi
-
-if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
- TRAMPOLINE_BUILD_FILE=""
-fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 716dfd0..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,1168 +0,0 @@
-# Changelog
-
-[PyPI History][1]
-
-[1]: https://pypi.org/project/google-api-core/#history
-
-## [2.29.0](https://github.com/googleapis/google-cloud-python/compare/google-api-core-v2.28.1...google-api-core-v2.29.0) (2026-01-08)
-
-
-### Features
-
-* Auto enable mTLS when supported certificates are detected (#869) ([f8bf6f9610f3e0e7580f223794c3906513e1fa73](https://github.com/googleapis/google-cloud-python/commit/f8bf6f9610f3e0e7580f223794c3906513e1fa73))
-* make parse_version_to_tuple public (#864) ([c969186f2b66bde1df5e25bbedc5868e27d136f9](https://github.com/googleapis/google-cloud-python/commit/c969186f2b66bde1df5e25bbedc5868e27d136f9))
-
-
-### Bug Fixes
-
-* flaky tests due to imprecision in floating point calculation and performance test setup (#865) ([93404080f853699b9217e4b76391a13525db4e3e](https://github.com/googleapis/google-cloud-python/commit/93404080f853699b9217e4b76391a13525db4e3e))
-* remove call to importlib.metadata.packages_distributions() for py38/py39 (#859) ([628003e217d9a881d24f3316aecfd48c244a73f0](https://github.com/googleapis/google-cloud-python/commit/628003e217d9a881d24f3316aecfd48c244a73f0))
-* Log version check errors (#858) ([6493118cae2720696c3d0097274edfd7fe2bce67](https://github.com/googleapis/google-cloud-python/commit/6493118cae2720696c3d0097274edfd7fe2bce67))
-* closes tailing streams in bidi classes. (#851) ([c97b3a004044ebf8b35c2a7ba97409d7795e11b0](https://github.com/googleapis/google-cloud-python/commit/c97b3a004044ebf8b35c2a7ba97409d7795e11b0))
-
-## [2.28.1](https://github.com/googleapis/python-api-core/compare/v2.28.0...v2.28.1) (2025-10-28)
-
-
-### Bug Fixes
-
-* Remove dependency on packaging and pkg_resources ([#852](https://github.com/googleapis/python-api-core/issues/852)) ([ca59a86](https://github.com/googleapis/python-api-core/commit/ca59a863b08a79c2bf0607f9085de1417422820b))
-
-## [2.28.0](https://github.com/googleapis/python-api-core/compare/v2.27.0...v2.28.0) (2025-10-24)
-
-
-### Features
-
-* Provide and use Python version support check ([#832](https://github.com/googleapis/python-api-core/issues/832)) ([d36e896](https://github.com/googleapis/python-api-core/commit/d36e896f98a2371c4d58ce1a7a3bc1a77a081836))
-
-## [2.27.0](https://github.com/googleapis/python-api-core/compare/v2.26.0...v2.27.0) (2025-10-22)
-
-
-### Features
-
-* Support for async bidi streaming apis ([#836](https://github.com/googleapis/python-api-core/issues/836)) ([9530548](https://github.com/googleapis/python-api-core/commit/95305480d234b6dd0903960db020e55125a997e0))
-
-## [2.26.0](https://github.com/googleapis/python-api-core/compare/v2.25.2...v2.26.0) (2025-10-08)
-
-
-### Features
-
-* Add trove classifier for Python 3.14 ([#842](https://github.com/googleapis/python-api-core/issues/842)) ([43690de](https://github.com/googleapis/python-api-core/commit/43690de33a23321d52ab856e2bf253590e1a9357))
-
-## [2.25.2](https://github.com/googleapis/python-api-core/compare/v2.25.1...v2.25.2) (2025-10-01)
-
-
-### Bug Fixes
-
-* Deprecate credentials_file argument ([#841](https://github.com/googleapis/python-api-core/issues/841)) ([324eb74](https://github.com/googleapis/python-api-core/commit/324eb7464d6ade9a8c2e413d4695bc7d7adfcb3d))
-* Fix async tests and round-off error in test expectations ([#837](https://github.com/googleapis/python-api-core/issues/837)) ([14a5978](https://github.com/googleapis/python-api-core/commit/14a59789e144905bd6c82180ad07a52bf1f84f02))
-
-## [2.25.1](https://github.com/googleapis/python-api-core/compare/v2.25.0...v2.25.1) (2025-06-02)
-
-
-### Bug Fixes
-
-* Allow BackgroundConsumer To Inform Caller of Fatal Exceptions with Optional Callback ([3206c01](https://github.com/googleapis/python-api-core/commit/3206c0170dda80a613bf257ebcf3b78c1a20465f))
-
-## [2.25.0](https://github.com/googleapis/python-api-core/compare/v2.24.2...v2.25.0) (2025-05-06)
-
-
-### Features
-
-* Add protobuf runtime version to `x-goog-api-client` header ([#812](https://github.com/googleapis/python-api-core/issues/812)) ([118bd96](https://github.com/googleapis/python-api-core/commit/118bd96f3907234351972409834ab5309cdfcee4))
-* Support dynamic retry backoff values ([#793](https://github.com/googleapis/python-api-core/issues/793)) ([70697a3](https://github.com/googleapis/python-api-core/commit/70697a3e39c389768e724fddacb3c9b97d609384))
-
-
-### Bug Fixes
-
-* Resolve issue where pre-release versions of dependencies are installed ([#808](https://github.com/googleapis/python-api-core/issues/808)) ([1ca7973](https://github.com/googleapis/python-api-core/commit/1ca7973a395099403be1a99c7c4583a8f22d5d8e))
-
-## [2.24.2](https://github.com/googleapis/python-api-core/compare/v2.24.1...v2.24.2) (2025-03-06)
-
-
-### Bug Fixes
-
-* **deps:** Allow protobuf 6.x ([#804](https://github.com/googleapis/python-api-core/issues/804)) ([687be7c](https://github.com/googleapis/python-api-core/commit/687be7cbf629a61feb43ef37d3d920fa32b2d636))
-
-## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24)
-
-
-### Bug Fixes
-
-* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19))
-* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6))
-
-
-### Documentation
-
-* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227))
-
-## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06)
-
-
-### Features
-
-* Add automatic logging config to support debug logging ([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06))
-* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f))
-
-## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11)
-
-
-### Features
-
-* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55))
-
-## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25)
-
-
-### Features
-
-* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5))
-
-
-### Bug Fixes
-
-* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954))
-* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763))
-* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3))
-* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf))
-
-## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) (2024-10-07)
-
-
-### Features
-
-* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a))
-
-
-### Bug Fixes
-
-* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) ([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51))
-
-## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18)
-
-
-### Features
-
-* Add async unsupported paramater exception ([#694](https://github.com/googleapis/python-api-core/issues/694)) ([8c137fe](https://github.com/googleapis/python-api-core/commit/8c137feb6e880fdd93d1248d9b6c10002dc3c096))
-* Add support for asynchronous rest streaming ([#686](https://github.com/googleapis/python-api-core/issues/686)) ([1b7bb6d](https://github.com/googleapis/python-api-core/commit/1b7bb6d1b721e4ee1561e8e4a347846d7fdd7c27))
-* Add support for creating exceptions from an asynchronous response ([#688](https://github.com/googleapis/python-api-core/issues/688)) ([1c4b0d0](https://github.com/googleapis/python-api-core/commit/1c4b0d079f2103a7b5562371a7bd1ada92528de3))
-
-## [2.19.2](https://github.com/googleapis/python-api-core/compare/v2.19.1...v2.19.2) (2024-08-16)
-
-
-### Bug Fixes
-
-* Fail gracefully if could not import `rpc_status` module ([#680](https://github.com/googleapis/python-api-core/issues/680)) ([7ccbf57](https://github.com/googleapis/python-api-core/commit/7ccbf5738fa236649f9a155055c71789362b5c4c))
-
-## [2.19.1](https://github.com/googleapis/python-api-core/compare/v2.19.0...v2.19.1) (2024-06-19)
-
-
-### Bug Fixes
-
-* Add support for protobuf 5.x ([#644](https://github.com/googleapis/python-api-core/issues/644)) ([fda0ca6](https://github.com/googleapis/python-api-core/commit/fda0ca6f0664ac5044671591ed62618175a7393f))
-* Ignore unknown fields in rest streaming. ([#651](https://github.com/googleapis/python-api-core/issues/651)) ([1203fb9](https://github.com/googleapis/python-api-core/commit/1203fb97d2685535f89113e944c4764c1deb595e))
-
-## [2.19.0](https://github.com/googleapis/python-api-core/compare/v2.18.0...v2.19.0) (2024-04-29)
-
-
-### Features
-
-* Add google.api_core.version_header ([#638](https://github.com/googleapis/python-api-core/issues/638)) ([a7b53e9](https://github.com/googleapis/python-api-core/commit/a7b53e9e9a7deb88baf92a2827958429e3677069))
-
-## [2.18.0](https://github.com/googleapis/python-api-core/compare/v2.17.1...v2.18.0) (2024-03-20)
-
-
-### Features
-
-* Add common logic for supporting universe domain ([#621](https://github.com/googleapis/python-api-core/issues/621)) ([94f2ca3](https://github.com/googleapis/python-api-core/commit/94f2ca3b4d094e6e10154634d3463d07ebea2035))
-
-
-### Bug Fixes
-
-* Add _registered_method to grpc ChannelStub ([#614](https://github.com/googleapis/python-api-core/issues/614)) ([5eaaea8](https://github.com/googleapis/python-api-core/commit/5eaaea8a989f8bdbdb5fbc95a155a20837c87f42))
-* **deps:** Require proto-plus >= 1.22.3 ([#626](https://github.com/googleapis/python-api-core/issues/626)) ([4fed37c](https://github.com/googleapis/python-api-core/commit/4fed37cbc32122f156e38250b5fa8b2b08a787a1))
-
-## [2.17.1](https://github.com/googleapis/python-api-core/compare/v2.17.0...v2.17.1) (2024-02-13)
-
-
-### Bug Fixes
-
-* Resolve issue handling protobuf responses in rest streaming ([#604](https://github.com/googleapis/python-api-core/issues/604)) ([bcebc92](https://github.com/googleapis/python-api-core/commit/bcebc92eca69dae81c5e546d526c92b164a6b3b4))
-
-## [2.17.0](https://github.com/googleapis/python-api-core/compare/v2.16.2...v2.17.0) (2024-02-06)
-
-
-### Features
-
-* Add attempt_direct_path argument to create_channel ([#583](https://github.com/googleapis/python-api-core/issues/583)) ([94726e7](https://github.com/googleapis/python-api-core/commit/94726e739698035b00667983f854c600252abd28))
-
-
-### Bug Fixes
-
-* Retry constructors methods support None ([#592](https://github.com/googleapis/python-api-core/issues/592)) ([416203c](https://github.com/googleapis/python-api-core/commit/416203c1888934670bfeccafe5f5469f87314512))
-
-## [2.16.2](https://github.com/googleapis/python-api-core/compare/v2.16.1...v2.16.2) (2024-02-02)
-
-
-### Bug Fixes
-
-* Spelling error `a,out` -> `amount` ([#596](https://github.com/googleapis/python-api-core/issues/596)) ([88688b1](https://github.com/googleapis/python-api-core/commit/88688b1625c4dab0df6124a0560f550eb322500f))
-
-## [2.16.1](https://github.com/googleapis/python-api-core/compare/v2.16.0...v2.16.1) (2024-01-30)
-
-
-### Bug Fixes
-
-* Fix broken import for google.api_core.retry_async.AsyncRetry ([#587](https://github.com/googleapis/python-api-core/issues/587)) ([ac012c0](https://github.com/googleapis/python-api-core/commit/ac012c04c69b8bbe72962f0d0d9e9536c0b4a524))
-
-## [2.16.0](https://github.com/googleapis/python-api-core/compare/v2.15.0...v2.16.0) (2024-01-29)
-
-
-### Features
-
-* Retry and retry_async support streaming rpcs ([#495](https://github.com/googleapis/python-api-core/issues/495)) ([17ff5f1](https://github.com/googleapis/python-api-core/commit/17ff5f1d83a9a6f50a0226fb0e794634bd584f17))
-
-## [2.15.0](https://github.com/googleapis/python-api-core/compare/v2.14.0...v2.15.0) (2023-12-07)
-
-
-### Features
-
-* Add support for Python 3.12 ([#557](https://github.com/googleapis/python-api-core/issues/557)) ([091b4f1](https://github.com/googleapis/python-api-core/commit/091b4f1c7fcc59c3f2a02ee44fd3c30b78423f12))
-* Add type annotations to wrapped grpc calls ([#554](https://github.com/googleapis/python-api-core/issues/554)) ([fc12b40](https://github.com/googleapis/python-api-core/commit/fc12b40bfc6e0c4bb313196e2e3a9c9374ce1c45))
-* Add universe_domain argument to ClientOptions ([3069ef4](https://github.com/googleapis/python-api-core/commit/3069ef4b9123ddb64841cbb7bbb183b53d502e0a))
-* Introduce compatibility with native namespace packages ([#561](https://github.com/googleapis/python-api-core/issues/561)) ([bd82827](https://github.com/googleapis/python-api-core/commit/bd82827108f1eeb6c05cfacf6c044b2afacc18a2))
-
-
-### Bug Fixes
-
-* Fix regression in `bidi` causing `Thread-ConsumeBidirectionalStream caught unexpected exception and will exit` ([#562](https://github.com/googleapis/python-api-core/issues/562)) ([40c8ae0](https://github.com/googleapis/python-api-core/commit/40c8ae0cf1f797e31e106461164e22db4fb2d3d9))
-* Replace deprecated `datetime.datetime.utcnow()` ([#552](https://github.com/googleapis/python-api-core/issues/552)) ([448923a](https://github.com/googleapis/python-api-core/commit/448923acf277a70e8704c949311bf4feaef8cab6)), closes [#540](https://github.com/googleapis/python-api-core/issues/540)
-
-## [2.14.0](https://github.com/googleapis/python-api-core/compare/v2.13.1...v2.14.0) (2023-11-09)
-
-
-### Features
-
-* Support with_call for wrapped rpcs ([#550](https://github.com/googleapis/python-api-core/issues/550)) ([01a57a7](https://github.com/googleapis/python-api-core/commit/01a57a745f4c8345c9c93412c27dd416b49f5953))
-
-## [2.13.1](https://github.com/googleapis/python-api-core/compare/v2.13.0...v2.13.1) (2023-11-09)
-
-
-### Bug Fixes
-
-* Update async client to use async retry ([#544](https://github.com/googleapis/python-api-core/issues/544)) ([f21bb32](https://github.com/googleapis/python-api-core/commit/f21bb32b8e6310116a642a6e6b6dd8e44e30e656))
-
-## [2.13.0](https://github.com/googleapis/python-api-core/compare/v2.12.0...v2.13.0) (2023-11-03)
-
-
-### Features
-
-* Add caching to routing header calculation ([#526](https://github.com/googleapis/python-api-core/issues/526)) ([6251eab](https://github.com/googleapis/python-api-core/commit/6251eab3fca5f7e509cb9b6e476ce1184094b711))
-
-
-### Bug Fixes
-
-* Add warning to retry target to avoid incorrect usage ([#543](https://github.com/googleapis/python-api-core/issues/543)) ([bfb40e6](https://github.com/googleapis/python-api-core/commit/bfb40e6929ef47be7a6464d2f1e0d06595736b8d))
-* Drop usage of distutils ([#541](https://github.com/googleapis/python-api-core/issues/541)) ([4bd9e10](https://github.com/googleapis/python-api-core/commit/4bd9e10f20eea227c88e3e1496010cca6dd8a270))
-* Ensure exception is available when BackgroundConsumer open stream fails ([#357](https://github.com/googleapis/python-api-core/issues/357)) ([405272c](https://github.com/googleapis/python-api-core/commit/405272c05f8c6d20e242c6172b01f78f0fd3bf32))
-
-## [2.12.0](https://github.com/googleapis/python-api-core/compare/v2.11.1...v2.12.0) (2023-09-07)
-
-
-### Features
-
-* Add a little bit of typing to google.api_core.retry ([#453](https://github.com/googleapis/python-api-core/issues/453)) ([2477ab9](https://github.com/googleapis/python-api-core/commit/2477ab9ea5c2e863a493fb7ebebaa429a44ea096))
-* Add grpc Compression argument to channels and methods ([#451](https://github.com/googleapis/python-api-core/issues/451)) ([bdebd63](https://github.com/googleapis/python-api-core/commit/bdebd6331f9c0d3d1a8ceaf274f07d2ed75bfe92))
-
-
-### Documentation
-
-* Fix a typo in google/api_core/page_iterator.py ([#511](https://github.com/googleapis/python-api-core/issues/511)) ([c0ce73c](https://github.com/googleapis/python-api-core/commit/c0ce73c4de53ad694fe36d17408998aa1230398f))
-
-## [2.11.1](https://github.com/googleapis/python-api-core/compare/v2.11.0...v2.11.1) (2023-06-12)
-
-
-### Bug Fixes
-
-* Add actionable errors for GCE long running operations ([#498](https://github.com/googleapis/python-api-core/issues/498)) ([7dfc3a7](https://github.com/googleapis/python-api-core/commit/7dfc3a7a439243f05238a11b68a31720fde1769e))
-* Invalid `dev` version identifiers in `setup.py` ([#505](https://github.com/googleapis/python-api-core/issues/505)) ([8844edb](https://github.com/googleapis/python-api-core/commit/8844edb1e802040810918a12bc9ff89104da38d4))
-
-## [2.11.0](https://github.com/googleapis/python-api-core/compare/v2.10.2...v2.11.0) (2022-11-10)
-
-
-### Features
-
-* Add support for Python 3.11 ([#466](https://github.com/googleapis/python-api-core/issues/466)) ([ff379e3](https://github.com/googleapis/python-api-core/commit/ff379e304c353bcab734e1c4706b74b356a1e932))
-* Allow representing enums with their unqualified symbolic names in headers ([#465](https://github.com/googleapis/python-api-core/issues/465)) ([522b98e](https://github.com/googleapis/python-api-core/commit/522b98ecc1ebd1c2280d3d7c73a02f6e4fb528d4))
-
-
-### Bug Fixes
-
-* Major refactoring of Polling, Retry and Timeout logic ([#462](https://github.com/googleapis/python-api-core/issues/462)) ([434253d](https://github.com/googleapis/python-api-core/commit/434253de16d9efdf984ddb64c409706cda1d5f82))
-* Require google-auth >= 2.14.1 ([#463](https://github.com/googleapis/python-api-core/issues/463)) ([7cc329f](https://github.com/googleapis/python-api-core/commit/7cc329fe1498b0a4285123448e4ea80c6a780d47))
-
-## [2.10.2](https://github.com/googleapis/python-api-core/compare/v2.10.1...v2.10.2) (2022-10-08)
-
-
-### Bug Fixes
-
-* **deps:** Allow protobuf 3.19.5 ([#459](https://github.com/googleapis/python-api-core/issues/459)) ([e949364](https://github.com/googleapis/python-api-core/commit/e949364ce3a2c4c3cdb2658054d4793aa942d999))
-
-## [2.10.1](https://github.com/googleapis/python-api-core/compare/v2.10.0...v2.10.1) (2022-09-14)
-
-
-### Bug Fixes
-
-* Improve transcoding error message ([#442](https://github.com/googleapis/python-api-core/issues/442)) ([538df80](https://github.com/googleapis/python-api-core/commit/538df80ed6d21f43b512a73853935f7a7b9bdf52))
-
-## [2.10.0](https://github.com/googleapis/python-api-core/compare/v2.9.0...v2.10.0) (2022-09-02)
-
-
-### Features
-
-* Add 'strict' to flatten_query_params to lower-case bools ([#433](https://github.com/googleapis/python-api-core/issues/433)) ([83678e9](https://github.com/googleapis/python-api-core/commit/83678e94e1081f9087b19c43f26fad4774184d66))
-
-## [2.9.0](https://github.com/googleapis/python-api-core/compare/v2.8.2...v2.9.0) (2022-09-01)
-
-
-### Features
-
-* Make grpc transcode logic work in terms of protobuf python objects ([#428](https://github.com/googleapis/python-api-core/issues/428)) ([c3ad8ea](https://github.com/googleapis/python-api-core/commit/c3ad8ea67447e3d8a1154d7a9221e116f60d425a))
-
-
-### Bug Fixes
-
-* Require python 3.7+ ([#410](https://github.com/googleapis/python-api-core/issues/410)) ([7ddb8c0](https://github.com/googleapis/python-api-core/commit/7ddb8c00e6be7ab6905a9a802ad1c3063fbfa46c))
-* Restore support for grpcio-gcp ([#418](https://github.com/googleapis/python-api-core/issues/418)) ([8c19609](https://github.com/googleapis/python-api-core/commit/8c19609d6244930bd91fd5f40ef9b5b65584c4a5))
-
-## [2.8.2](https://github.com/googleapis/python-api-core/compare/v2.8.1...v2.8.2) (2022-06-13)
-
-
-### Bug Fixes
-
-* **deps:** allow protobuf < 5.0.0 ([#400](https://github.com/googleapis/python-api-core/issues/400)) ([8f73d2e](https://github.com/googleapis/python-api-core/commit/8f73d2ee2d3af2201f877aa7e2f7361147759dc7))
-* drop support for grpc-gcp ([#401](https://github.com/googleapis/python-api-core/issues/401)) ([5da6733](https://github.com/googleapis/python-api-core/commit/5da6733a475c436efc11b14889af73b3a0e20379))
-
-
-### Documentation
-
-* fix changelog header to consistent size ([#394](https://github.com/googleapis/python-api-core/issues/394)) ([ac266e9](https://github.com/googleapis/python-api-core/commit/ac266e935bc4e7c6dff250384407e7a60d8dba90))
-* Fix typo in the BackgroundConsumer docstring ([#395](https://github.com/googleapis/python-api-core/issues/395)) ([0eb727f](https://github.com/googleapis/python-api-core/commit/0eb727f92314db3c4383754514f75a49ba02e27b))
-
-## [2.8.1](https://github.com/googleapis/python-api-core/compare/v2.8.0...v2.8.1) (2022-05-26)
-
-
-### Bug Fixes
-
-* **deps:** require googleapis-common-protos >= 1.56.2 ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
-* **deps:** require protobuf>= 3.15.0, <4.0.0dev ([#385](https://github.com/googleapis/python-api-core/issues/385)) ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
-
-## [2.8.0](https://github.com/googleapis/python-api-core/compare/v2.7.3...v2.8.0) (2022-05-18)
-
-
-### Features
-
-* adds support for audience in client_options ([#379](https://github.com/googleapis/python-api-core/issues/379)) ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
-* adds support for audience in client_options. ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
-
-## [2.7.3](https://github.com/googleapis/python-api-core/compare/v2.7.2...v2.7.3) (2022-04-29)
-
-
-### Bug Fixes
-
-* Avoid AttributeError if grpcio-status is not installed ([#370](https://github.com/googleapis/python-api-core/issues/370)) ([022add1](https://github.com/googleapis/python-api-core/commit/022add16266f9c07f0f88eea13472cc2e0bfc991))
-
-## [2.7.2](https://github.com/googleapis/python-api-core/compare/v2.7.1...v2.7.2) (2022-04-13)
-
-
-### Bug Fixes
-
-* allow grpc without grpcio-status ([#355](https://github.com/googleapis/python-api-core/issues/355)) ([112049e](https://github.com/googleapis/python-api-core/commit/112049e79f5a5b0a989d85d438a1bd29485f46f7))
-* remove dependency on pkg_resources ([#361](https://github.com/googleapis/python-api-core/issues/361)) ([523dbd0](https://github.com/googleapis/python-api-core/commit/523dbd0b10d37ffcf83fa751f0bad313f162abf1))
-
-## [2.7.1](https://github.com/googleapis/python-api-core/compare/v2.7.0...v2.7.1) (2022-03-09)
-
-
-### Bug Fixes
-
-* add more context to error message. ([#340](https://github.com/googleapis/python-api-core/issues/340)) ([0680fb4](https://github.com/googleapis/python-api-core/commit/0680fb4d3e013fe2de27e0a2ae2cd9896479e596))
-
-## [2.7.0](https://github.com/googleapis/python-api-core/compare/v2.6.1...v2.7.0) (2022-03-08)
-
-
-### Features
-
-* expose extra fields in ExtendedOperation ([#351](https://github.com/googleapis/python-api-core/issues/351)) ([9abc6f4](https://github.com/googleapis/python-api-core/commit/9abc6f48f23c87b9771dca3c96b4f6af39620a50))
-
-## [2.6.1](https://github.com/googleapis/python-api-core/compare/v2.6.0...v2.6.1) (2022-03-05)
-
-
-### Bug Fixes
-
-* Remove py2 tag from wheel ([#343](https://github.com/googleapis/python-api-core/issues/343)) ([7e21e9e](https://github.com/googleapis/python-api-core/commit/7e21e9e34892472a34f9b44175fa761f0e3fd9ed))
-
-## [2.6.0](https://github.com/googleapis/python-api-core/compare/v2.5.0...v2.6.0) (2022-03-03)
-
-
-### Features
-
-* initial support for Extended Operations ([#344](https://github.com/googleapis/python-api-core/issues/344)) ([021bb7d](https://github.com/googleapis/python-api-core/commit/021bb7d5bf0a1d8ac58dbf0c738fac309135ba7d))
-
-## [2.5.0](https://github.com/googleapis/python-api-core/compare/v2.4.0...v2.5.0) (2022-02-02)
-
-
-### Features
-
-* add api_key to client options ([#248](https://github.com/googleapis/python-api-core/issues/248)) ([5e5ad37](https://github.com/googleapis/python-api-core/commit/5e5ad37b8161109d65b0fab43636f7424e570fa3))
-
-
-### Bug Fixes
-
-* **deps:** remove setuptools from dependencies ([#339](https://github.com/googleapis/python-api-core/issues/339)) ([c782f29](https://github.com/googleapis/python-api-core/commit/c782f294b50b078f01959627fb82aa4c5efec333))
-
-
-### Documentation
-
-* fix typo in library name ([#332](https://github.com/googleapis/python-api-core/issues/332)) ([f267111](https://github.com/googleapis/python-api-core/commit/f267111823545a6c67ef5f10b85cd8c2fab8a612))
-
-## [2.4.0](https://www.github.com/googleapis/python-api-core/compare/v2.3.2...v2.4.0) (2022-01-11)
-
-
-### Features
-
-* add support for 'error_info' ([#315](https://www.github.com/googleapis/python-api-core/issues/315)) ([cc46aa6](https://www.github.com/googleapis/python-api-core/commit/cc46aa68ec184871330d16a6c767f57a4f0eb633))
-* iterator for processing JSON responses in REST streaming. ([#317](https://www.github.com/googleapis/python-api-core/issues/317)) ([f9f2696](https://www.github.com/googleapis/python-api-core/commit/f9f26969842b456ea372bed941d712b7a9ab7239))
-
-## [2.3.2](https://www.github.com/googleapis/python-api-core/compare/v2.3.1...v2.3.2) (2021-12-16)
-
-
-### Bug Fixes
-
-* address broken wheels in version 2.3.1
-
-## [2.3.1](https://www.github.com/googleapis/python-api-core/compare/v2.3.0...v2.3.1) (2021-12-15)
-
-
-### Bug Fixes
-* exclude function target from retry deadline exceeded exception message ([#318](https://www.github.com/googleapis/python-api-core/issues/318)) ([34ebdcc](https://www.github.com/googleapis/python-api-core/commit/34ebdcc251d4f3d7d496e8e0b78847645a06650b))
-
-## [2.3.0](https://www.github.com/googleapis/python-api-core/compare/v2.2.2...v2.3.0) (2021-11-25)
-
-
-### Features
-
-* add operations rest client to support long-running operations. ([#311](https://www.github.com/googleapis/python-api-core/issues/311)) ([ce1adf3](https://www.github.com/googleapis/python-api-core/commit/ce1adf395982ede157c0f25a920946bb52789873))
-
-
-### Bug Fixes
-
-* handle bare 'grpc.Call' in 'from_grpc_error' ([#298](https://www.github.com/googleapis/python-api-core/issues/298)) ([060b339](https://www.github.com/googleapis/python-api-core/commit/060b339e3af296dd1772bfc1b4a0d2b4264cae1f))
-
-## [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02)
-
-
-### Bug Fixes
-
-* make 'gapic_v1.method.DEFAULT' a typed object ([#292](https://www.github.com/googleapis/python-api-core/issues/292)) ([ffc51f0](https://www.github.com/googleapis/python-api-core/commit/ffc51f03c7ce5d9f009ba859b8df385d52925578))
-
-## [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26)
-
-
-### Bug Fixes
-
-* revert "fix: do not error on LROs with no response or error" ([#294](https://www.github.com/googleapis/python-api-core/issues/294)) ([9e6091e](https://www.github.com/googleapis/python-api-core/commit/9e6091ee59a30e72a6278b369f6a08e7aef32f22))
-
-## [2.2.0](https://www.github.com/googleapis/python-api-core/compare/v2.1.1...v2.2.0) (2021-10-25)
-
-
-### Features
-
-* add 'GoogleAPICallError.error_details' property ([#286](https://www.github.com/googleapis/python-api-core/issues/286)) ([ef6f0fc](https://www.github.com/googleapis/python-api-core/commit/ef6f0fcfdfe771172056e35e3c990998b3b00416))
-
-## [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13)
-
-
-### Bug Fixes
-
-* add mypy checking + 'py.typed' file ([#290](https://www.github.com/googleapis/python-api-core/issues/290)) ([0023ee1](https://www.github.com/googleapis/python-api-core/commit/0023ee1fe0e8b80c7a9e8987e0f322a829e5d613))
-
-## [2.1.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.1...v2.1.0) (2021-10-05)
-
-
-### Features
-
-* add grpc transcoding + tests ([#259](https://www.github.com/googleapis/python-api-core/issues/259)) ([afe0fa1](https://www.github.com/googleapis/python-api-core/commit/afe0fa14c21289c8244606a9f81544cff8ac5f7c))
-* Add helper function to format query_params for rest transport. ([#275](https://www.github.com/googleapis/python-api-core/issues/275)) ([1c5eb4d](https://www.github.com/googleapis/python-api-core/commit/1c5eb4df93d78e791082d9282330ebf0faacd222))
-* add support for Python 3.10 ([#284](https://www.github.com/googleapis/python-api-core/issues/284)) ([a422a5d](https://www.github.com/googleapis/python-api-core/commit/a422a5d72cb6f363d57e7a4effe421ba8e049cde))
-
-## [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31)
-
-
-### Bug Fixes
-
-* do not error on LROs with no response or error ([#258](https://www.github.com/googleapis/python-api-core/issues/258)) ([618f192](https://www.github.com/googleapis/python-api-core/commit/618f19201af729205892fcecd9c8e315ba3174a3))
-
-## [2.0.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.0-b1...v2.0.0) (2021-08-18)
-
-### ⚠ BREAKING CHANGES
-
-* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
-
-### Bug Fixes
-
-* bump grpcio version to use stable aio API ([#234](https://www.github.com/googleapis/python-api-core/issues/234)) ([bdbf889](https://www.github.com/googleapis/python-api-core/commit/bdbf889210b709d7c1945f2160bcba9161b4dd2e))
-* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
-
-## [2.0.0b1](https://www.github.com/googleapis/python-api-core/compare/v1.31.1...v2.0.0b1) (2021-08-03)
-
-
-### ⚠ BREAKING CHANGES
-
-* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
-
-### Bug Fixes
-
-* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
-
-## [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26)
-
-
-### Bug Fixes
-
-* add 'requests.exceptions.ChunkedEncodingError' to retryable exceptions ([#237](https://www.github.com/googleapis/python-api-core/issues/237)) ([5e540f2](https://www.github.com/googleapis/python-api-core/commit/5e540f28493cc3e13260458a8d1c6a1abb2ed313))
-
-
-### Documentation
-
-* add Samples section to CONTRIBUTING.rst ([#229](https://www.github.com/googleapis/python-api-core/issues/229)) ([a12c051](https://www.github.com/googleapis/python-api-core/commit/a12c0516c42918b05415835029717814353b883b))
-
-## [1.31.0](https://www.github.com/googleapis/python-api-core/compare/v1.30.0...v1.31.0) (2021-07-07)
-
-
-### Features
-
-* add ServiceUnavailable exception to polling retries ([#184](https://www.github.com/googleapis/python-api-core/issues/184)) ([11032cf](https://www.github.com/googleapis/python-api-core/commit/11032cf08ecc16dd252a6cda8b33b0b28ec4f4ba))
-
-
-### Bug Fixes
-
-* undeprecate entity factory helpers ([#101](https://www.github.com/googleapis/python-api-core/issues/101)) ([1fbee03](https://www.github.com/googleapis/python-api-core/commit/1fbee03495a136eef3d6aaa5ea0aadd6e4b58e8b)), closes [#100](https://www.github.com/googleapis/python-api-core/issues/100)
-
-## [1.30.0](https://www.github.com/googleapis/python-api-core/compare/v1.29.0...v1.30.0) (2021-06-08)
-
-
-### Features
-
-* add iterator capability to paged iterators ([#200](https://www.github.com/googleapis/python-api-core/issues/200)) ([3487d68](https://www.github.com/googleapis/python-api-core/commit/3487d68bdab6f20e2ab931c8283f63c94862cf31))
-
-## [1.29.0](https://www.github.com/googleapis/python-api-core/compare/v1.28.0...v1.29.0) (2021-06-02)
-
-
-### Features
-
-* HTTPIterator now accepts a page_size parameter to control page … ([#197](https://www.github.com/googleapis/python-api-core/issues/197)) ([a421913](https://www.github.com/googleapis/python-api-core/commit/a4219137a5bfcf2a6f44780ecdbf475c1129e461))
-
-
-### Documentation
-
-* fix broken links in multiprocessing.rst ([#195](https://www.github.com/googleapis/python-api-core/issues/195)) ([8d8bc51](https://www.github.com/googleapis/python-api-core/commit/8d8bc5150ee5543b4aeb2c271da034a5305d1436))
-
-## [1.28.0](https://www.github.com/googleapis/python-api-core/compare/v1.27.0...v1.28.0) (2021-05-20)
-
-
-### Bug Fixes
-
-* require google-auth>=1.25.0 ([#190](https://www.github.com/googleapis/python-api-core/issues/190)) ([155da5e](https://www.github.com/googleapis/python-api-core/commit/155da5e18cc2fdcfa57de6f956b7d078e79cd4b7))
-
-
-### Miscellaneous Chores
-
-* release 1.28.0 ([#192](https://www.github.com/googleapis/python-api-core/issues/192)) ([11b5da4](https://www.github.com/googleapis/python-api-core/commit/11b5da426a842541ca2b861d3387fc312b3f5b60))
-
-## [1.27.0](https://www.github.com/googleapis/python-api-core/compare/v1.26.3...v1.27.0) (2021-05-18)
-
-
-### Features
-
-* Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124))
-* retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334))
-
-## [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
-
-
-### Bug Fixes
-
-* skip empty policy bindings in `len()` and `iter()` ([#159](https://www.github.com/googleapis/python-api-core/issues/159)) ([9eaa786](https://www.github.com/googleapis/python-api-core/commit/9eaa7868164a7e98792de24d2be97f79fba22322))
-
-
-### Documentation
-
-* update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac))
-
-## [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
-
-
-### Bug Fixes
-
-* save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f))
-
-## [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
-
-
-### Bug Fixes
-
-* add operation name to x-goog-request-params in async client ([#137](https://www.github.com/googleapis/python-api-core/issues/137)) ([7271b23](https://www.github.com/googleapis/python-api-core/commit/7271b23afddb032e49e957525704d0cd5bfa4c65))
-
-## [1.26.0](https://www.github.com/googleapis/python-api-core/compare/v1.25.1...v1.26.0) (2021-02-08)
-
-
-### Features
-
-* allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395))
-
-## [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
-
-
-### Bug Fixes
-
-* add operation name to x-goog-request-params ([#133](https://www.github.com/googleapis/python-api-core/issues/133)) ([97cef4a](https://www.github.com/googleapis/python-api-core/commit/97cef4ad1db55938715f9ac8000d1b0ad1e71873))
-
-
-### Documentation
-
-* fix spelling errors in retry ([#131](https://www.github.com/googleapis/python-api-core/issues/131)) ([232dab0](https://www.github.com/googleapis/python-api-core/commit/232dab0ad3ef2cca0edfe707d8f90ca0ea200ba2))
-
-## [1.25.0](https://www.github.com/googleapis/python-api-core/compare/v1.24.1...v1.25.0) (2021-01-14)
-
-
-### Features
-
-* allow gRPC metadata to be passed to operations client ([#127](https://www.github.com/googleapis/python-api-core/issues/127)) ([73854e8](https://www.github.com/googleapis/python-api-core/commit/73854e897b885e9be290f2676a8a1466b4f041e4))
-
-
-### Documentation
-
-* **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787)
-
-## [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
-
-
-### Bug Fixes
-
-* support 'retry' for ops built from HTTP/gRPC responses ([#115](https://www.github.com/googleapis/python-api-core/issues/115)) ([7a38243](https://www.github.com/googleapis/python-api-core/commit/7a38243c351b228d103eee81fc5ae521ad1c930e)), closes [#87](https://www.github.com/googleapis/python-api-core/issues/87)
-
-## [1.24.0](https://www.github.com/googleapis/python-api-core/compare/v1.23.0...v1.24.0) (2020-12-14)
-
-
-### Features
-
-* add support for Python 3.9, drop support for Python 3.5 ([#111](https://www.github.com/googleapis/python-api-core/issues/111)) ([fdbed0f](https://www.github.com/googleapis/python-api-core/commit/fdbed0f0cbae8de21c73338a6817f8aa79cef4c9)), closes [#110](https://www.github.com/googleapis/python-api-core/issues/110)
-
-
-### Documentation
-
-* explain how to create credentials from dict ([#109](https://www.github.com/googleapis/python-api-core/issues/109)) ([5dce6d6](https://www.github.com/googleapis/python-api-core/commit/5dce6d61e7324a415c1b3ceaeec1ce1b5f1ea189))
-
-## [1.23.0](https://www.github.com/googleapis/python-api-core/compare/v1.22.4...v1.23.0) (2020-10-16)
-
-
-### Features
-
-* **api-core:** pass retry from result() to done() ([#9](https://www.github.com/googleapis/python-api-core/issues/9)) ([6623b31](https://www.github.com/googleapis/python-api-core/commit/6623b31a2040b834be808d711fa397dc428f1837))
-
-
-### Bug Fixes
-
-* map LRO errors to library exception types ([#86](https://www.github.com/googleapis/python-api-core/issues/86)) ([a855339](https://www.github.com/googleapis/python-api-core/commit/a85533903c57be4809fe76435e298409e0903931)), closes [#15](https://www.github.com/googleapis/python-api-core/issues/15)
-* harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34))
-* update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265))
-
-## [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
-
-
-### Bug Fixes
-
-* use version.py instead of pkg_resources.get_distribution ([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47))
-
-## [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
-
-
-### Bug Fixes
-
-* **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31)
-
-## [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
-
-
-### Bug Fixes
-
-* only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62))
-
-## [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
-
-
-### Documentation
-
-* fix spelling errors for amount in retry ([#69](https://www.github.com/googleapis/python-api-core/issues/69)) ([7bb713d](https://www.github.com/googleapis/python-api-core/commit/7bb713d13b1fe3cca58263f5e499136a84abc456))
-
-## [1.22.0](https://www.github.com/googleapis/python-api-core/compare/v1.21.0...v1.22.0) (2020-07-21)
-
-
-### Features
-
-* allow quota project to be passed to create_channel ([#58](https://www.github.com/googleapis/python-api-core/issues/58)) ([e2d9a7b](https://www.github.com/googleapis/python-api-core/commit/e2d9a7b209b7dfab300dc848fabbae8f42a2ab19))
-
-
-### Bug Fixes
-
-* _determine_timeout problem handling float type timeout ([#64](https://www.github.com/googleapis/python-api-core/issues/64)) ([2010373](https://www.github.com/googleapis/python-api-core/commit/2010373b27536d1191175624b297a709d70153fa))
-
-
-### Documentation
-
-* change the documentation for using 'six.moves.collections_abc.Mapping' instead of 'dict' in 'client_options.from_dict()' ([#53](https://www.github.com/googleapis/python-api-core/issues/53)) ([c890675](https://www.github.com/googleapis/python-api-core/commit/c890675dc9ebc084f105be81dc81c048f4f599ea))
-
-## [1.21.0](https://www.github.com/googleapis/python-api-core/compare/v1.20.1...v1.21.0) (2020-06-18)
-
-
-### Features
-
-* allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991))
-
-## [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
-
-
-### Bug Fixes
-
-* **dependencies:** increase protobuf version ([#49](https://www.github.com/googleapis/python-api-core/issues/49)) ([1ba6095](https://www.github.com/googleapis/python-api-core/commit/1ba609592968c9d828449b89a3ade3bcaf5edd7f)), closes [#48](https://www.github.com/googleapis/python-api-core/issues/48)
-
-## [1.20.0](https://www.github.com/googleapis/python-api-core/compare/v1.19.1...v1.20.0) (2020-06-09)
-
-
-### Features
-
-* allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25)
-
-## [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
-
-
-### Bug Fixes
-
-* bump up grpcio minimum version to 1.29.0 ([#41](https://www.github.com/googleapis/python-api-core/issues/41)) ([4b11422](https://www.github.com/googleapis/python-api-core/commit/4b114221b3ae01eee540bedf47381c3b7c214b0c))
-
-## [1.19.0](https://www.github.com/googleapis/python-api-core/compare/v1.18.0...v1.19.0) (2020-06-05)
-
-
-### Features
-
-* **client_options:** add new client options 'quota_project_id', 'scopes', and 'credentials_file' ([a582936](https://www.github.com/googleapis/python-api-core/commit/a58293601d6da90c499d404e634a979a6cae9708))
-
-## [1.18.0](https://www.github.com/googleapis/python-api-core/compare/v1.17.0...v1.18.0) (2020-06-04)
-
-
-### Features
-
-* [CBT-6 helper] Exposing Retry._deadline as a property ([#20](https://www.github.com/googleapis/python-api-core/issues/20)) ([7be1e59](https://www.github.com/googleapis/python-api-core/commit/7be1e59e9d75c112f346d2b76dce3dd60e3584a1))
-* add client_encrypted_cert_source to ClientOptions ([#31](https://www.github.com/googleapis/python-api-core/issues/31)) ([e4eaec0](https://www.github.com/googleapis/python-api-core/commit/e4eaec0ff255114138d3715280f86d34d861a6fa))
-* AsyncIO Integration [Part 2] ([#28](https://www.github.com/googleapis/python-api-core/issues/28)) ([dd9b2f3](https://www.github.com/googleapis/python-api-core/commit/dd9b2f38a70e85952cc05552ec8070cdf29ddbb4)), closes [#23](https://www.github.com/googleapis/python-api-core/issues/23)
-* First batch of AIO integration ([#26](https://www.github.com/googleapis/python-api-core/issues/26)) ([a82f289](https://www.github.com/googleapis/python-api-core/commit/a82f2892b8f219b82e120e6ed9f4070869c28be7))
-* third batch of AsyncIO integration ([#29](https://www.github.com/googleapis/python-api-core/issues/29)) ([7d8d580](https://www.github.com/googleapis/python-api-core/commit/7d8d58075a92e93662747d36a2d55b5e9f0943e1))
-
-## [1.17.0](https://www.github.com/googleapis/python-api-core/compare/v1.16.0...v1.17.0) (2020-04-14)
-
-
-### Features
-
-* **api_core:** add retry param into PollingFuture() and it's inheritors ([#9923](https://www.github.com/googleapis/python-api-core/issues/9923)) ([14f1f34](https://www.github.com/googleapis/python-api-core/commit/14f1f34e013c90fed2da2918625083d299fda557)), closes [#6197](https://www.github.com/googleapis/python-api-core/issues/6197)
-* **api-core:** add client_cert_source to ClientOptions ([#17](https://www.github.com/googleapis/python-api-core/issues/17)) ([748c935](https://www.github.com/googleapis/python-api-core/commit/748c935d4cf03a1f04fba9139c3c3150fd694d88))
-
-
-### Bug Fixes
-
-* consume part of StreamingResponseIterator to support failure while under a retry context ([#10206](https://www.github.com/googleapis/python-api-core/issues/10206)) ([2b103b6](https://www.github.com/googleapis/python-api-core/commit/2b103b60ece16a1e1bc98cfda7ec375191a90f75))
-
-## 1.16.0
-
-01-13-2020 14:19 PST
-
-### New Features
-
-- feat(storage): support optionsRequestedPolicyVersion ([#9989](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9989))
-- feat(api_core): support version 3 policy bindings ([#9869](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9869))
-
-## 1.15.0
-
-12-16-2019 15:27 PST
-
-### New Features
-- Make the last retry happen at deadline. ([#9873](https://github.com/googleapis/google-cloud-python/pull/9873))
-- Add a repr method for ClientOptions. ([#9849](https://github.com/googleapis/google-cloud-python/pull/9849))
-- Simplify `from_rfc3339` methods. ([#9641](https://github.com/googleapis/google-cloud-python/pull/9641))
-- Provide a `raw_page` field for `page_iterator.Page`. ([#9486](https://github.com/googleapis/google-cloud-python/pull/9486))
-
-### Documentation
-- Add Python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036))
-- Remove references to the old authentication credentials. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456))
-
-## 1.14.3
-
-10-07-2019 10:35 PDT
-
-
-### Implementation Changes
-- Finalize during close of 'ResumableBidiRpc' ([#9337](https://github.com/googleapis/google-cloud-python/pull/9337))
-- add on_error to Retry.__init__ ([#8892](https://github.com/googleapis/google-cloud-python/pull/8892))
-- Fix race in 'BackgroundConsumer._thread_main'. ([#8883](https://github.com/googleapis/google-cloud-python/pull/8883))
-
-### Documentation
-- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294))
-- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148))
-- About of time -> amount of time ([#9052](https://github.com/googleapis/google-cloud-python/pull/9052))
-- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035))
-
-### Internal / Testing Changes
-- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085))
-
-## 1.14.2
-
-07-30-2019 14:08 PDT
-
-
-### Documentation
-- Add client_options documentation. ([#8834](https://github.com/googleapis/google-cloud-python/pull/8834))
-
-## 1.14.1
-
-07-30-2019 12:24 PDT
-
-
-### Implementation Changes
-- Remove error log entry on clean BiDi shutdown. ([#8806](https://github.com/googleapis/google-cloud-python/pull/8806))
-- Forward 'timeout' arg from 'exception' to `_blocking_poll`. ([#8735](https://github.com/googleapis/google-cloud-python/pull/8735))
-
-### Documentation
-- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805))
-- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705))
-
-## 1.14.0
-
-07-17-2019 13:16 PDT
-
-
-### New Features
-- Firestore: Add `should_terminate` predicate for clean BiDi shutdown. ([#8650](https://github.com/googleapis/google-cloud-python/pull/8650))
-
-### Dependencies
-- Update pins of 'googleapis-common-protos. ([#8688](https://github.com/googleapis/google-cloud-python/pull/8688))
-
-### Documentation
-- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288))
-
-### Internal / Testing Changes
-- All: Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464))
-
-## 1.13.0
-
-06-24-2019 10:34 PDT
-
-### New Features
-- Add `client_options.ClientOptions` object. ([#8265](https://github.com/googleapis/google-cloud-python/pull/8265))
-
-## 1.12.0
-
-06-18-2019 12:37 PDT
-
-
-### New Features
-- Add Throttling to Bidi Reopening. Mitigates ResumableBidiRpc consuming 100% CPU ([#8193](https://github.com/googleapis/google-cloud-python/pull/8193))
-
-## 1.11.1
-
-05-28-2019 11:19 PDT
-
-
-### Implementation Changes
-- Classify 503 Service Unavailable errors as transient. ([#8182](https://github.com/googleapis/google-cloud-python/pull/8182))
-
-### Dependencies
-- Pin `grpcio < 2.0dev`. ([#8182](https://github.com/googleapis/google-cloud-python/pull/8182))
-
-### Internal / Testing Changes
-- Add parameterized test for `from_rfc3339` with nanos ([#7675](https://github.com/googleapis/google-cloud-python/pull/7675))
-- Unbreak pytype by silencing a false positive. ([#8106](https://github.com/googleapis/google-cloud-python/pull/8106))
-
-## 1.11.0
-
-05-15-2019 10:29 PDT
-
-### New Features
-
-- Refactor 'client_info' support. ([#7849](https://github.com/googleapis/google-cloud-python/pull/7849))
-
-## 1.10.0
-
-04-29-2019 10:12 PDT
-
-### Implementation Changes
-
-- Append leading zeros for nanosecond precision DateTimes
- ([#7663](https://github.com/googleapis/google-cloud-python/pull/7663))
-
-### New Features
-
-- Add `user_agent` property to `ClientInfo`
- ([#7799](https://github.com/googleapis/google-cloud-python/pull/7799))
-
-## 1.9.0
-
-04-05-2019 10:38 PDT
-
-
-### Implementation Changes
-- Allow passing metadata as part of creating a bidi ([#7514](https://github.com/googleapis/google-cloud-python/pull/7514))
-
-### Internal / Testing Changes
-- Update setup.py
-- API Core: specify a pytype output directory in setup.cfg. ([#7639](https://github.com/googleapis/google-cloud-python/pull/7639))
-
-## 1.8.2
-
-03-22-2019 16:27 PDT
-
-
-### Implementation Changes
-- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535))
-
-### Internal / Testing Changes
-- When re-opening a `ResumableBidiRPC` set `_request_queue_generator` to `None`. ([#7548](https://github.com/googleapis/google-cloud-python/pull/7548))
-
-## 1.8.1
-
-03-12-2019 12:45 PDT
-
-### Implementation Changes
-- Protect the creation of a background thread in BackgroundConsumer and wait on it starting. ([#7499](https://github.com/googleapis/google-cloud-python/pull/7499))
-
-## 1.8.0
-
-02-23-2019 15:46 PST
-
-
-### New Features
-- Add support to unwrap Anys into wrapped pb2 objects. ([#7430](https://github.com/googleapis/google-cloud-python/pull/7430))
-- Add `Operation.deserialize`. ([#7427](https://github.com/googleapis/google-cloud-python/pull/7427))
-
-### Documentation
-- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307))
-
-### Internal / Testing Changes
-- Fix new lint failure. ([#7382](https://github.com/googleapis/google-cloud-python/pull/7382))
-
-## 1.7.0
-
-12-17-2018 13:56 PST
-
-### New Features
-- Support converting `DatetimeWithNanos` to / from `google.protobuf.timestamp_pb2.Timestamp`. ([#6919](https://github.com/googleapis/google-cloud-python/pull/6919))
-
-### Documentation
-- Document Python 2 deprecation. ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910))
-- Add usage example for `google.api_core.iam.Policy`. ([#6855](https://github.com/googleapis/google-cloud-python/pull/6855))
-
-### Internal / Testing Changes
-- Work around pytype bug for `ABCMeta.register`. ([#6873](https://github.com/googleapis/google-cloud-python/pull/6873))
-
-## 1.6.0
-
-11-30-2018 12:45 PST
-
-
-### Implementation Changes
-- Import stdlib ABCs from 'collections.abc' rather than 'collections'. ([#6451](https://github.com/googleapis/google-cloud-python/pull/6451))
-
-### New Features
-- Move google.cloud.iam (core) to google.api_core.iam ([#6740](https://github.com/googleapis/google-cloud-python/pull/6740))
-- Add bidi support to api_core. ([#6191](https://github.com/googleapis/google-cloud-python/pull/6191))
-
-### Documentation
-- Fix typo ([#6532](https://github.com/googleapis/google-cloud-python/pull/6532))
-
-### Internal / Testing Changes
-- blacken api_core and core ([#6668](https://github.com/googleapis/google-cloud-python/pull/6668))
-
-## 1.5.2
-
-11-09-2018 14:22 PST
-
-
-### Implementation Changes
-- Retry transient errors in 'PollingFuture.result'. ([#6305](https://github.com/googleapis/google-cloud-python/pull/6305))
-
-### Dependencies
-- Remove hyphen from named extra in api_core. ([#6468](https://github.com/googleapis/google-cloud-python/pull/6468))
-- Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391))
-- Avoid broken 'google-common-apis 1.5.4' release. ([#6355](https://github.com/googleapis/google-cloud-python/pull/6355))
-
-## 1.5.1
-
-10-29-2018 13:29 PDT
-
-### Implementation Changes
-- Don't URL-encode slashes in gRPC request headers. ([#6310](https://github.com/googleapis/google-cloud-python/pull/6310))
-
-### Internal / Testing Changes
-- Back out changes from [#6267](https://github.com/googleapis/google-cloud-python/pull/6267) / `api_core-1.6.0a1` release. ([#6328](https://github.com/googleapis/google-cloud-python/pull/6328))
-
-## 1.5.0
-
-### New Features
-- Add bidi, Bidirectional Streaming, to api-core ([#6211](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6211))
-
-### Internal / Testing Changes
-- Use new Nox ([#6175](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6175))
-
-## 1.4.1
-
-### Dependencies
-- Pin minimum protobuf dependency to 3.4.0. ([#6132](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6132))
-
-### Internal / Testing Changes
-- Add type-checking via pytype to api_core. ([#6116](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6116))
-
-## 1.4.0
-
-### Dependencies
-
-- Add support for gRPC connection management (available when using optional grpc_gcp dependency) ([#5553](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5553)) ([#5904](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5904))
-- Update classifiers to drop Python 3.4 and add Python 3.7 ([#5702](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5702))
-
-## 1.3.0
-
-### New Features
-
-- Add protobuf_helpers.field_mask to calculate a field mask from two messages (#5320)
-
-## 1.2.1
-
-### Implementation Changes
-- Make client_info work without gRPC installed. (#5075)
-- Rename `x-goog-header-params` to `x-goog-request-params` (#5495)
-
-## 1.2.0
-
-### Implementation Changes
-- Add close method to grpc Channel (#5333)
-
-### Internal / Testing Changes
-- Fix tests after grpcio update (#5333)
-- Add Test runs for Python 3.7 and remove 3.4 (#5295)
-
-## 1.1.2
-
-### Packaging
-- Update setuptools before packaging (#5265)
-
-## 1.1.1
-
-### Internal / Testing Changes
-- Use `install_requires` for platform dependencies instead of `extras_require` (#4991)
-- Update trove classifier to '5 - Production/Stable'
-
-## 1.1.0
-
-### Interface additions
-
-- Add `datetime_helpers.DatetimeWithNanoSeconds` (#4979)
-
-### Implementation changes
-
-- Use a class to wrap grpc streaming errors instead of monkey-patching (#4995)
-
-## 1.0.0
-
-This is the stable v1.0.0 release of google-api-core for Python. Releases after
-this will not contain breaking changes.
-
-### Interface changes and additions
-
-- Made `api_core.page_iterator.PageIterator.item_to_value` public
-- Added ability to specify retry for `Operation` and `polling.Future`. (#4922)
-
-## 0.1.4
-
-### New Features
-
-- Add `ChannelStub` to `grpc_helpers` for testing gRPC-based clients. (#4705)
-
-### Notable Implementation Changes
-
-- Fix handling of gapic metadata when specified as `None`. (#4701)
-
-## 0.1.3
-
-### Notable Implementation Changes
-
-- Apply scopes to explicitly provided credentials if needed (#4594).
-- Removing `google.api_core.gapic_v1.method.METRICS_METADATA_KEY`. It
- can be accessed via
- `google.api_core.gapic_v1.client_info.METRICS_METADATA_KEY` (#4588).
-
-### Dependencies
-
-- Upgrading to latest `grpcio==1.8.2` (#4642). For details, see
- related gRPC [bug](https://github.com/grpc/grpc/issues/9688)
- and [fix](https://github.com/grpc/grpc/pull/13665).
-
-PyPI: https://pypi.org/project/google-api-core/0.1.3/
-
-## 0.1.2
-
-- Upgrading `concurrent.futures` backport from `>= 3.0.0`
- to `>= 3.2.0` (#4521).
-- Moved `datetime`-related helpers from `google.cloud.core` to
- `google.api_core.datetime_helpers` (#4399).
-- Added missing `client_info` to `gapic_v1/__init__.py`'s
- `__all__` (#4567).
-- Added helpers for routing headers to `gapic_v1` (#4336).
-
-PyPI: https://pypi.org/project/google-api-core/0.1.2/
-
-## 0.1.1
-
-### Dependencies
-
-- Upgrading `grpcio` dependency from `1.2.0, < 1.6dev` to `>= 1.7.0` (#4280)
-
-PyPI: https://pypi.org/project/google-api-core/0.1.1/
-
-## 0.1.0
-
-Initial release
-
-Prior to being separated, this package was developed in `google-cloud-core`, so
-relevant changes from that package are included here.
-
-- Add google.api.core.gapic_v1.config (#4022)
-- Add google.api.core.helpers.grpc_helpers (#4041)
-- Add google.api.core.gapic_v1.method (#4057)
-- Add wrap_with_paging (#4067)
-- Add grpc_helpers.create_channel (#4069)
-- Add DEFAULT sentinel for gapic_v1.method (#4079)
-- Remove `googleapis-common-protos` from deps in non-`core` packages. (#4098)
-- Add google.api.core.operations_v1 (#4081)
-- Fix test assertion in test_wrap_method_with_overriding_retry_deadline (#4131)
-- Add google.api.core.helpers.general_helpers.wraps (#4166)
-- Update Docs with Python Setup Guide (#4187)
-- Move modules in google.api.core.helpers up one level, delete google.api.core.helpers. (#4196)
-- Clarify that PollingFuture timeout is in seconds. (#4201)
-- Add api_core package (#4210)
-- Replace usage of google.api.core with google.api_core (#4221)
-- Add google.api_core.gapic_v2.client_info (#4225)
-- Fix how api_core.operation populates exception errors (#4231)
-- Fix bare except (#4250)
-- Fix parsing of API errors with Unicode err message (#4251)
-- Port gax proto helper methods (#4249)
-- Remove gapic_v1.method.wrap_with_paging (#4257)
-- Add final set of protobuf helpers to api_core (#4259)
-
-PyPI: https://pypi.org/project/google-api-core/0.1.0/
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index 039f436..0000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,95 +0,0 @@
-<!-- # Generated by synthtool. DO NOT EDIT! !-->
-# Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of
-experience, education, socio-economic status, nationality, personal appearance,
-race, religion, or sexual identity and orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
- advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, or to ban temporarily or permanently any
-contributor for other behaviors that they deem inappropriate, threatening,
-offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-This Code of Conduct also applies outside the project spaces when the Project
-Steward has a reasonable belief that an individual's behavior may have a
-negative impact on the project or its community.
-
-## Conflict Resolution
-
-We do not believe that all conflict is bad; healthy debate and disagreement
-often yield positive results. However, it is never okay to be disrespectful or
-to engage in behavior that violates the project’s code of conduct.
-
-If you see someone violating the code of conduct, you are encouraged to address
-the behavior directly with those involved. Many issues can be resolved quickly
-and easily, and this gives people more control over the outcome of their
-dispute. If you are unable to resolve the matter for any reason, or if the
-behavior is threatening or harassing, report it. We are dedicated to providing
-an environment where participants feel welcome and safe.
-
-
-Reports should be directed to *googleapis-stewards@google.com*, the
-Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
-receive and address reported violations of the code of conduct. They will then
-work with a committee consisting of representatives from the Open Source
-Programs Office and the Google Open Source Strategy team. If for any reason you
-are uncomfortable reaching out to the Project Steward, please email
-opensource@google.com.
-
-We will investigate every complaint, but you may not receive a direct response.
-We will use our discretion in determining when and how to follow up on reported
-incidents, which may range from not taking action to permanent expulsion from
-the project and project-sponsored spaces. We will notify the accused of the
-report and provide them an opportunity to discuss it before any action is taken.
-The identity of the reporter will be omitted from the details of the report
-supplied to the accused. In potentially harmful situations, such as ongoing
-harassment or threats to anyone's safety, we may take action without notice.
-
-## Attribution
-
-This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
-available at
-https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
deleted file mode 100644
index 946b49d..0000000
--- a/CONTRIBUTING.rst
+++ /dev/null
@@ -1,247 +0,0 @@
-############
-Contributing
-############
-
-#. **Please sign one of the contributor license agreements below.**
-#. Fork the repo, develop and test your code changes, add docs.
-#. Make sure that your commit messages clearly describe the changes.
-#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
-
-.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
-
-.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
-
-***************
-Adding Features
-***************
-
-In order to add a feature:
-
-- The feature must be documented in both the API and narrative
- documentation.
-
-- The feature must work fully on the following CPython versions:
- 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows.
-
-- The feature must not add unnecessary dependencies (where
- "unnecessary" is of course subjective, but new dependencies should
- be discussed).
-
-****************************
-Using a Development Checkout
-****************************
-
-You'll have to create a development environment using a Git checkout:
-
-- While logged into your GitHub account, navigate to the
- ``python-api-core`` `repo`_ on GitHub.
-
-- Fork and clone the ``python-api-core`` repository to your GitHub account by
- clicking the "Fork" button.
-
-- Clone your fork of ``python-api-core`` from your GitHub account to your local
- computer, substituting your account username and specifying the destination
- as ``hack-on-python-api-core``. E.g.::
-
- $ cd ${HOME}
- $ git clone git@github.com:USERNAME/python-api-core.git hack-on-python-api-core
- $ cd hack-on-python-api-core
- # Configure remotes such that you can pull changes from the googleapis/python-api-core
- # repository into your local repository.
- $ git remote add upstream git@github.com:googleapis/python-api-core.git
- # fetch and merge changes from upstream into main
- $ git fetch upstream
- $ git merge upstream/main
-
-Now your local repo is set up such that you will push changes to your GitHub
-repo, from which you can submit a pull request.
-
-To work on the codebase and run the tests, we recommend using ``nox``,
-but you can also use a ``virtualenv`` of your own creation.
-
-.. _repo: https://github.com/googleapis/python-api-core
-
-Using ``nox``
-=============
-
-We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
-
-- To test your changes, run unit tests with ``nox``::
- $ nox -s unit
-
-- To run a single unit test::
-
- $ nox -s unit-3.13 -- -k <name of test>
-
-
- .. note::
-
- The unit tests tests are described in the ``noxfile.py`` files
- in each directory.
-
-.. nox: https://pypi.org/project/nox/
-
-*****************************************
-I'm getting weird errors... Can you help?
-*****************************************
-
-If the error mentions ``Python.h`` not being found,
-install ``python-dev`` and try again.
-On Debian/Ubuntu::
-
- $ sudo apt-get install python-dev
-
-************
-Coding Style
-************
-- We use the automatic code formatter ``black``. You can run it using
- the nox session ``blacken``. This will eliminate many lint errors. Run via::
-
- $ nox -s blacken
-
-- PEP8 compliance is required, with exceptions defined in the linter configuration.
- If you have ``nox`` installed, you can test that you have not introduced
- any non-compliant code via::
-
- $ nox -s lint
-
-- In order to make ``nox -s lint`` run faster, you can set some environment
- variables::
-
- export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
- export GOOGLE_CLOUD_TESTING_BRANCH="main"
-
- By doing this, you are specifying the location of the most up-to-date
- version of ``python-api-core``. The the suggested remote name ``upstream``
- should point to the official ``googleapis`` checkout and the
- the branch should be the main branch on that remote (``main``).
-
-- This repository contains configuration for the
- `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
- our linters during a commit. If you have it installed on your ``$PATH``,
- you can enable enforcing those checks via:
-
-.. code-block:: bash
-
- $ pre-commit install
- pre-commit installed at .git/hooks/pre-commit
-
-Exceptions to PEP8:
-
-- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
- "Function-Under-Test"), which is PEP8-incompliant, but more readable.
- Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
-
-
-*************
-Test Coverage
-*************
-
-- The codebase *must* have 100% test statement coverage after each commit.
- You can test coverage via ``nox -s cover``.
-
-******************************************************
-Documentation Coverage and Building HTML Documentation
-******************************************************
-
-If you fix a bug, and the bug requires an API or behavior modification, all
-documentation in this package which references that API or behavior must be
-changed to reflect the bug fix, ideally in the same commit that fixes the bug
-or adds the feature.
-
-Build the docs via:
-
- $ nox -s docs
-
-*************************
-Samples and code snippets
-*************************
-
-Code samples and snippets live in the `samples/` catalogue. Feel free to
-provide more examples, but make sure to write tests for those examples.
-Each folder containing example code requires its own `noxfile.py` script
-which automates testing. If you decide to create a new folder, you can
-base it on the `samples/snippets` folder (providing `noxfile.py` and
-the requirements files).
-
-The tests will run against a real Google Cloud Project, so you should
-configure them just like the System Tests.
-
-- To run sample tests, you can execute::
-
- # Run all tests in a folder
- $ cd samples/snippets
- $ nox -s py-3.8
-
- # Run a single sample test
- $ cd samples/snippets
- $ nox -s py-3.8 -- -k <name of test>
-
-********************************************
-Note About ``README`` as it pertains to PyPI
-********************************************
-
-The `description on PyPI`_ for the project comes directly from the
-``README``. Due to the reStructuredText (``rst``) parser used by
-PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
-instead of
-``https://github.com/googleapis/python-api-core/blob/main/CONTRIBUTING.rst``)
-may cause problems creating links or rendering the description.
-
-.. _description on PyPI: https://pypi.org/project/google-api-core
-
-
-*************************
-Supported Python Versions
-*************************
-
-We support:
-
-- `Python 3.9`_
-- `Python 3.10`_
-- `Python 3.11`_
-- `Python 3.12`_
-- `Python 3.13`_
-- `Python 3.14`_
-
-.. _Python 3.9: https://docs.python.org/3.9/
-.. _Python 3.10: https://docs.python.org/3.10/
-.. _Python 3.11: https://docs.python.org/3.11/
-.. _Python 3.12: https://docs.python.org/3.12/
-.. _Python 3.13: https://docs.python.org/3.13/
-.. _Python 3.14: https://docs.python.org/3.14/
-
-
-Supported versions can be found in our ``noxfile.py`` `config`_.
-
-.. _config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py
-
-
-**********
-Versioning
-**********
-
-This library follows `Semantic Versioning`_.
-
-.. _Semantic Versioning: http://semver.org/
-
-Some packages are currently in major version zero (``0.y.z``), which means that
-anything may change at any time and the public API should not be considered
-stable.
-
-******************************
-Contributor License Agreements
-******************************
-
-Before we can accept your pull requests you'll need to sign a Contributor
-License Agreement (CLA):
-
-- **If you are an individual writing original source code** and **you own the
- intellectual property**, then you'll need to sign an
- `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
-- **If you work for a company that wants to allow you to contribute your work**,
- then you'll need to sign a
- `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
-
-You can sign these electronically (just scroll to the bottom). After that,
-we'll be able to accept your pull requests.
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index d6814cd..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-include README.rst LICENSE
-recursive-include google *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
-
-# Exclude scripts for samples readmegen
-prune scripts/readme-gen
diff --git a/README.rst b/README.rst
index 6805567..79e7428 100644
--- a/README.rst
+++ b/README.rst
@@ -1,3 +1,8 @@
+:**NOTE**: **This github repository is archived. The repository contents and history have moved to** `google-cloud-python`_.
+
+.. _google-cloud-python: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-api-core
+
+
Core Library for Google Client Libraries
========================================
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
deleted file mode 100644
index b0a2954..0000000
--- a/docs/_static/custom.css
+++ /dev/null
@@ -1,20 +0,0 @@
-div#python2-eol {
- border-color: red;
- border-width: medium;
-}
-
-/* Ensure minimum width for 'Parameters' / 'Returns' column */
-dl.field-list > dt {
- min-width: 100px
-}
-
-/* Insert space between methods for readability */
-dl.method {
- padding-top: 10px;
- padding-bottom: 10px
-}
-
-/* Insert empty space between classes */
-dl.class {
- padding-bottom: 50px
-}
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
deleted file mode 100644
index 6316a53..0000000
--- a/docs/_templates/layout.html
+++ /dev/null
@@ -1,50 +0,0 @@
-
-{% extends "!layout.html" %}
-{%- block content %}
-{%- if theme_fixed_sidebar|lower == 'true' %}
- <div class="document">
- {{ sidebar() }}
- {%- block document %}
- <div class="documentwrapper">
- {%- if render_sidebar %}
- <div class="bodywrapper">
- {%- endif %}
-
- {%- block relbar_top %}
- {%- if theme_show_relbar_top|tobool %}
- <div class="related top">
-
- {{- rellink_markup () }}
- </div>
- {%- endif %}
- {% endblock %}
-
- <div class="body" role="main">
- <div class="admonition" id="python2-eol">
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
- Library versions released prior to that date will continue to be available. For more information please
- visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
- </div>
- {% block body %} {% endblock %}
- </div>
-
- {%- block relbar_bottom %}
- {%- if theme_show_relbar_bottom|tobool %}
- <div class="related bottom">
-
- {{- rellink_markup () }}
- </div>
- {%- endif %}
- {% endblock %}
-
- {%- if render_sidebar %}
- </div>
- {%- endif %}
- </div>
- {%- endblock %}
- <div class="clearer"></div>
- </div>
-{%- else %}
-{{ super() }}
-{%- endif %}
-{%- endblock %}
diff --git a/docs/auth.rst b/docs/auth.rst
deleted file mode 100644
index 3dcc5fd..0000000
--- a/docs/auth.rst
+++ /dev/null
@@ -1,213 +0,0 @@
-Authentication
-**************
-
-.. _Overview:
-
-Overview
-========
-
-For a language agnostic overview of authentication on Google Cloud, see `Authentication Overview`_.
-
-.. _Authentication Overview: https://cloud.google.com/docs/authentication
-
-* **If you're running in a Google Virtual Machine Environment (Compute Engine, App Engine, Cloud Run, Cloud Functions)**,
- authentication should "just work".
-
-* **If you're developing locally**,
- the easiest way to authenticate is using the `Google Cloud SDK`_:
-
- .. code-block:: bash
-
- $ gcloud auth application-default login
-
- Note that this command generates credentials for client libraries. To authenticate the CLI itself, use:
-
- .. code-block:: bash
-
- $ gcloud auth login
-
- Previously, ``gcloud auth login`` was used for both use cases. If
- your ``gcloud`` installation does not support the new command,
- please update it:
-
- .. code-block:: bash
-
- $ gcloud components update
-
-.. _Google Cloud SDK: http://cloud.google.com/sdk
-
-
-* **If you're running your application elsewhere**,
- you should download a `service account`_ JSON keyfile
- and point to it using an environment variable:
-
- .. code-block:: bash
-
- $ export GOOGLE_APPLICATION_CREDENTIALS="/path/to/keyfile.json"
-
-.. _service account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
-
-Client-Provided Authentication
-==============================
-
-Every package uses a :class:`Client <google.cloud.client.Client>`
-as a base for interacting with an API.
-For example:
-
-.. code-block:: python
-
- from google.cloud import datastore
- client = datastore.Client()
-
-Passing no arguments at all will "just work" if you've followed the
-instructions in the :ref:`Overview`.
-The credentials are inferred from your local environment by using
-Google `Application Default Credentials`_.
-
-.. _Application Default Credentials: https://developers.google.com/identity/protocols/application-default-credentials
-
-.. _Precedence:
-
-Credential Discovery Precedence
--------------------------------
-
-When loading the `Application Default Credentials`_,
-the library will check for credentials in your environment by following the
-precedence outlined by :func:`google.auth.default`.
-
-Explicit Credentials
-====================
-
-The Application Default Credentials discussed above can be useful
-if your code needs to run in many different environments or
-if you just don't want authentication to be a focus in your code.
-
-However, you may want to be explicit because
-
-* your code will only run in one place
-* you may have code which needs to be run as a specific service account
- every time (rather than with the locally inferred credentials)
-* you may want to use two separate accounts to simultaneously access data
- from different projects
-
-In these situations, you can create an explicit
-:class:`~google.auth.credentials.Credentials` object suited to your environment.
-After creation, you can pass it directly to a :class:`Client <google.cloud.client.Client>`:
-
-.. code:: python
-
- client = Client(credentials=credentials)
-
-.. tip::
- To create a credentials object, follow the `google-auth-guide`_.
-
-.. _google-auth-guide: https://googleapis.dev/python/google-auth/latest/user-guide.html#service-account-private-key-files
-
-Google Compute Engine Environment
----------------------------------
-
-These credentials are used in Google Virtual Machine Environments.
-This includes most App Engine runtimes, Compute Engine, Cloud
-Functions, and Cloud Run.
-
-To create
-:class:`credentials <google.auth.compute_engine.Credentials>`:
-
-.. code:: python
-
- from google.auth import compute_engine
- credentials = compute_engine.Credentials()
-
-Service Accounts
-----------------
-
-A `service account`_ is stored in a JSON keyfile.
-
-.. code:: python
-
- from google.oauth2 import service_account
-
- credentials = service_account.Credentials.from_service_account_file(
- '/path/to/key.json')
-
-A JSON string or dictionary:
-
-.. code:: python
-
- import json
-
- from google.oauth2 import service_account
-
- json_account_info = json.loads(...) # convert JSON to dictionary
- credentials = service_account.Credentials.from_service_account_info(
- json_account_info)
-
-.. tip::
-
- Previously the Google Cloud Console would issue a PKCS12/P12 key for your
- service account. This library does not support that key format. You can
- generate a new JSON key for the same service account from the console.
-
-User Accounts (3-legged OAuth 2.0) with a refresh token
--------------------------------------------------------
-
-The majority of cases are intended to authenticate machines or
-workers rather than actual user accounts. However, it's also
-possible to call Google Cloud APIs with a user account via
-`OAuth 2.0`_.
-
-.. _OAuth 2.0: https://developers.google.com/identity/protocols/OAuth2
-
-.. tip::
-
- A production application should **use a service account**,
- but you may wish to use your own personal user account when first
- getting started with the ``google-cloud-*`` library.
-
-The simplest way to use credentials from a user account is via
-Application Default Credentials using ``gcloud auth application-default login``
-(as mentioned above) and :func:`google.auth.default`:
-
-.. code:: python
-
- import google.auth
-
- credentials, project = google.auth.default()
-
-This will still follow the :ref:`precedence <Precedence>`
-described above,
-so be sure none of the other possible environments conflict
-with your user provided credentials.
-
-Troubleshooting
-===============
-
-Setting up a Service Account
-----------------------------
-
-If your application is not running on a Google Virtual Machine Environment,
-you need a Service Account. See `Creating a Service Account`_.
-
-.. _Creating a Service Account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
-
-Using Google Compute Engine
----------------------------
-
-If your code is running on Google Compute Engine,
-using the inferred Google `Application Default Credentials`_
-will be sufficient for retrieving credentials.
-
-However, by default your credentials may not grant you
-access to the services you intend to use.
-Be sure when you `set up the GCE instance`_,
-you add the correct scopes for the APIs you want to access:
-
-* **All APIs**
-
- * ``https://www.googleapis.com/auth/cloud-platform``
- * ``https://www.googleapis.com/auth/cloud-platform.read-only``
-
-For scopes for specific APIs see `OAuth 2.0 Scopes for Google APIs`_
-
-.. _set up the GCE instance: https://cloud.google.com/compute/docs/authentication#using
-.. _OAuth 2.0 Scopes for Google APIS: https://developers.google.com/identity/protocols/oauth2/scopes
diff --git a/docs/changelog.md b/docs/changelog.md
deleted file mode 120000
index 04c99a5..0000000
--- a/docs/changelog.md
+++ /dev/null
@@ -1 +0,0 @@
-../CHANGELOG.md
\ No newline at end of file
diff --git a/docs/client_info.rst b/docs/client_info.rst
deleted file mode 100644
index e976b18..0000000
--- a/docs/client_info.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-Client Information Helpers
-==========================
-
-.. automodule:: google.api_core.client_info
- :members:
- :show-inheritance:
-
-.. automodule:: google.api_core.gapic_v1.client_info
- :members:
- :show-inheritance:
-
diff --git a/docs/client_options.rst b/docs/client_options.rst
deleted file mode 100644
index da7c9a3..0000000
--- a/docs/client_options.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Client Options
-==========================
-
-.. automodule:: google.api_core.client_options
- :members:
- :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
deleted file mode 100644
index ad4723c..0000000
--- a/docs/conf.py
+++ /dev/null
@@ -1,384 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# google-api-core documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-# For plugins that can not read conf.py.
-# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
-sys.path.insert(0, os.path.abspath("."))
-
-__version__ = ""
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.5.5"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- "sphinx.ext.autodoc",
- "sphinx.ext.autosummary",
- "sphinx.ext.intersphinx",
- "sphinx.ext.coverage",
- "sphinx.ext.doctest",
- "sphinx.ext.napoleon",
- "sphinx.ext.todo",
- "sphinx.ext.viewcode",
- "recommonmark",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_options = {"members": True}
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-# source_suffix = ['.rst', '.md']
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = "google-api-core"
-copyright = "2019, Google"
-author = "Google APIs"
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = [
- "_build",
- "**/.nox/**/*",
- "samples/AUTHORING_GUIDE.md",
- "samples/CONTRIBUTING.md",
- "samples/snippets/README.rst",
-]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
- "description": "Google Cloud Client Libraries for google-api-core",
- "github_user": "googleapis",
- "github_repo": "python-api-core",
- "github_banner": True,
- "font_family": "'Roboto', Georgia, sans",
- "head_font_family": "'Roboto', Georgia, serif",
- "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-api-core-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
- # Temporarily suppress this to avoid "more than one target found for
- # cross-reference" warning, which are intractable for us to avoid while in
- # a mono-repo.
- # See https://github.com/sphinx-doc/sphinx/blob
- # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
- "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #'papersize': 'letterpaper',
- # The font size ('10pt', '11pt' or '12pt').
- #'pointsize': '10pt',
- # Additional stuff for the LaTeX preamble.
- #'preamble': '',
- # Latex figure (float) alignment
- #'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (
- root_doc,
- "google-api-core.tex",
- "google-api-core Documentation",
- author,
- "manual",
- )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (
- root_doc,
- "google-api-core",
- "google-api-core Documentation",
- [author],
- 1,
- )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (
- root_doc,
- "google-api-core",
- "google-api-core Documentation",
- author,
- "google-api-core",
- "google-api-core Library",
- "APIs",
- )
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
-
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
- "python": ("https://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
- "google.api_core": (
- "https://googleapis.dev/python/google-api-core/latest/",
- None,
- ),
- "grpc": ("https://grpc.github.io/grpc/python/", None),
- "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
- "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
-}
-
-
-# Napoleon settings
-napoleon_google_docstring = True
-napoleon_numpy_docstring = True
-napoleon_include_private_with_doc = False
-napoleon_include_special_with_doc = True
-napoleon_use_admonition_for_examples = False
-napoleon_use_admonition_for_notes = False
-napoleon_use_admonition_for_references = False
-napoleon_use_ivar = False
-napoleon_use_param = True
-napoleon_use_rtype = True
diff --git a/docs/exceptions.rst b/docs/exceptions.rst
deleted file mode 100644
index d671f4e..0000000
--- a/docs/exceptions.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Exceptions
-==========
-
-.. automodule:: google.api_core.exceptions
- :members:
- :show-inheritance:
diff --git a/docs/futures.rst b/docs/futures.rst
deleted file mode 100644
index d0dadac..0000000
--- a/docs/futures.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-Futures
-=======================
-
-.. automodule:: google.api_core.future
- :members:
- :show-inheritance:
-
-.. automodule:: google.api_core.future.polling
- :members:
- :show-inheritance:
-
-.. automodule:: google.api_core.future.async_future
- :members:
- :show-inheritance:
diff --git a/docs/helpers.rst b/docs/helpers.rst
deleted file mode 100644
index 6f72df9..0000000
--- a/docs/helpers.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-Helpers
-=======
-
-
-General Helpers
----------------
-
-.. automodule:: google.api_core.general_helpers
- :members:
- :show-inheritance:
-
-
-Datetime Helpers
-----------------
-
-.. automodule:: google.api_core.datetime_helpers
- :members:
- :show-inheritance:
-
-
-gRPC Helpers
-------------
-
-.. automodule:: google.api_core.grpc_helpers
- :members:
- :show-inheritance:
diff --git a/docs/iam.rst b/docs/iam.rst
deleted file mode 100644
index bb80ae3..0000000
--- a/docs/iam.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Identity and Access Management
-==============================
-
-.. automodule:: google.api_core.iam
- :members:
- :show-inheritance:
- :member-order: bysource
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 858e889..0000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,32 +0,0 @@
-The ``google-cloud-core`` package contains helpers common to all
-``google-cloud-*`` packages. In an attempt to reach a stable API,
-much of the functionality has been split out into this package,
-``google-api-core``.
-
-.. include:: multiprocessing.rst
-
-Core
-====
-
-.. toctree::
- auth
- client_info
- client_options
- exceptions
- futures
- helpers
- iam
- operation
- operations_client
- page_iterator
- path_template
- retry
- timeout
-
-Changelog
-~~~~~~~~~
-
-.. toctree::
- :maxdepth: 2
-
- changelog
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
deleted file mode 100644
index 536d17b..0000000
--- a/docs/multiprocessing.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-.. note::
-
- Because this client uses :mod:`grpc` library, it is safe to
- share instances across threads. In multiprocessing scenarios, the best
- practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
- :class:`multiprocessing.Process`.
diff --git a/docs/operation.rst b/docs/operation.rst
deleted file mode 100644
index 492cf67..0000000
--- a/docs/operation.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-Long-Running Operations
-=======================
-
-.. automodule:: google.api_core.operation
- :members:
- :show-inheritance:
-
-Long-Running Operations in AsyncIO
--------------------------------------
-
-.. automodule:: google.api_core.operation_async
- :members:
- :show-inheritance:
diff --git a/docs/operations_client.rst b/docs/operations_client.rst
deleted file mode 100644
index be466d3..0000000
--- a/docs/operations_client.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Long-Running Operations Client
-==============================
-
-.. automodule:: google.api_core.operations_v1
- :members:
- :show-inheritance:
diff --git a/docs/page_iterator.rst b/docs/page_iterator.rst
deleted file mode 100644
index 3652e6d..0000000
--- a/docs/page_iterator.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-Page Iterators
-==============
-
-.. automodule:: google.api_core.page_iterator
- :members:
- :show-inheritance:
-
-Page Iterators in AsyncIO
--------------------------
-
-.. automodule:: google.api_core.page_iterator_async
- :members:
- :show-inheritance:
diff --git a/docs/path_template.rst b/docs/path_template.rst
deleted file mode 100644
index 220779e..0000000
--- a/docs/path_template.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Path Templates
-==============
-
-.. automodule:: google.api_core.path_template
- :members:
- :show-inheritance:
diff --git a/docs/retry.rst b/docs/retry.rst
deleted file mode 100644
index 6e165f5..0000000
--- a/docs/retry.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-Retry
-=====
-
-.. automodule:: google.api_core.retry
- :members:
- :show-inheritance:
-
-Retry in AsyncIO
-----------------
-
-.. automodule:: google.api_core.retry_async
- :members:
- :noindex:
- :show-inheritance:
diff --git a/docs/timeout.rst b/docs/timeout.rst
deleted file mode 100644
index 943d425..0000000
--- a/docs/timeout.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Timeout
-=======
-
-.. automodule:: google.api_core.timeout
- :members:
- :show-inheritance:
diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py
deleted file mode 100644
index a52ffe8..0000000
--- a/google/api_core/__init__.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Google API Core.
-
-This package contains common code and utilities used by Google client libraries.
-"""
-
-from google.api_core import _python_package_support
-from google.api_core import _python_version_support
-from google.api_core import version as api_core_version
-
-__version__ = api_core_version.__version__
-
-# NOTE: Until dependent artifacts require this version of
-# google.api_core, the functionality below must be made available
-# manually in those artifacts.
-
-# expose dependency checks for external callers
-check_python_version = _python_version_support.check_python_version
-check_dependency_versions = _python_package_support.check_dependency_versions
-parse_version_to_tuple = _python_package_support.parse_version_to_tuple
-warn_deprecation_for_versions_less_than = (
- _python_package_support.warn_deprecation_for_versions_less_than
-)
-DependencyConstraint = _python_package_support.DependencyConstraint
-
-# perform version checks against api_core, and emit warnings if needed
-check_python_version(package="google.api_core")
-check_dependency_versions("google.api_core")
diff --git a/google/api_core/_python_package_support.py b/google/api_core/_python_package_support.py
deleted file mode 100644
index b8732b6..0000000
--- a/google/api_core/_python_package_support.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Code to check versions of dependencies used by Google Cloud Client Libraries."""
-
-import warnings
-from typing import Optional, Tuple
-
-from collections import namedtuple
-
-from ._python_version_support import (
- _flatten_message,
- _get_distribution_and_import_packages,
-)
-
-from importlib import metadata
-
-ParsedVersion = Tuple[int, ...]
-
-# Here we list all the packages for which we want to issue warnings
-# about deprecated and unsupported versions.
-DependencyConstraint = namedtuple(
- "DependencyConstraint",
- ["package_name", "minimum_fully_supported_version", "recommended_version"],
-)
-_PACKAGE_DEPENDENCY_WARNINGS = [
- DependencyConstraint(
- "google.protobuf",
- minimum_fully_supported_version="4.25.8",
- recommended_version="6.x",
- )
-]
-
-
-DependencyVersion = namedtuple("DependencyVersion", ["version", "version_string"])
-# Version string we provide in a DependencyVersion when we can't determine the version of a
-# package.
-UNKNOWN_VERSION_STRING = "--"
-
-
-def parse_version_to_tuple(version_string: str) -> ParsedVersion:
- """Safely converts a semantic version string to a comparable tuple of integers.
-
- Example: "4.25.8" -> (4, 25, 8)
- Ignores non-numeric parts and handles common version formats.
-
- Args:
- version_string: Version string in the format "x.y.z" or "x.y.z<suffix>"
-
- Returns:
- Tuple of integers for the parsed version string.
- """
- parts = []
- for part in version_string.split("."):
- try:
- parts.append(int(part))
- except ValueError:
- # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here.
- # This is a simplification compared to 'packaging.parse_version', but sufficient
- # for comparing strictly numeric semantic versions.
- break
- return tuple(parts)
-
-
-def get_dependency_version(
- dependency_name: str,
-) -> DependencyVersion:
- """Get the parsed version of an installed package dependency.
-
- This function checks for an installed package and returns its version
- as a comparable tuple of integers object for safe comparison.
-
- Args:
- dependency_name: The distribution name of the package (e.g., 'requests').
-
- Returns:
- A DependencyVersion namedtuple with `version` (a tuple of integers) and
- `version_string` attributes, or `DependencyVersion(None,
- UNKNOWN_VERSION_STRING)` if the package is not found or
- another error occurs during version discovery.
-
- """
- try:
- version_string: str = metadata.version(dependency_name)
- parsed_version = parse_version_to_tuple(version_string)
- return DependencyVersion(parsed_version, version_string)
- except Exception:
- # Catch exceptions from metadata.version() (e.g., PackageNotFoundError)
- # or errors during parse_version_to_tuple
- return DependencyVersion(None, UNKNOWN_VERSION_STRING)
-
-
-def warn_deprecation_for_versions_less_than(
- consumer_import_package: str,
- dependency_import_package: str,
- minimum_fully_supported_version: str,
- recommended_version: Optional[str] = None,
- message_template: Optional[str] = None,
-):
- """Issue any needed deprecation warnings for `dependency_import_package`.
-
- If `dependency_import_package` is installed at a version less than
- `minimum_fully_supported_version`, this issues a warning using either a
- default `message_template` or one provided by the user. The
- default `message_template` informs the user that they will not receive
- future updates for `consumer_import_package` if
- `dependency_import_package` is somehow pinned to a version lower
- than `minimum_fully_supported_version`.
-
- Args:
- consumer_import_package: The import name of the package that
- needs `dependency_import_package`.
- dependency_import_package: The import name of the dependency to check.
- minimum_fully_supported_version: The dependency_import_package version number
- below which a deprecation warning will be logged.
- recommended_version: If provided, the recommended next version, which
- could be higher than `minimum_fully_supported_version`.
- message_template: A custom default message template to replace
- the default. This `message_template` is treated as an
- f-string, where the following variables are defined:
- `dependency_import_package`, `consumer_import_package` and
- `dependency_distribution_package` and
- `consumer_distribution_package` and `dependency_package`,
- `consumer_package` , which contain the import packages, the
- distribution packages, and pretty string with both the
- distribution and import packages for the dependency and the
- consumer, respectively; and `minimum_fully_supported_version`,
- `version_used`, and `version_used_string`, which refer to supported
- and currently-used versions of the dependency.
-
- """
- if (
- not consumer_import_package
- or not dependency_import_package
- or not minimum_fully_supported_version
- ): # pragma: NO COVER
- return
-
- dependency_version = get_dependency_version(dependency_import_package)
- if not dependency_version.version:
- return
-
- if dependency_version.version < parse_version_to_tuple(
- minimum_fully_supported_version
- ):
- (
- dependency_package,
- dependency_distribution_package,
- ) = _get_distribution_and_import_packages(dependency_import_package)
- (
- consumer_package,
- consumer_distribution_package,
- ) = _get_distribution_and_import_packages(consumer_import_package)
-
- recommendation = (
- " (we recommend {recommended_version})" if recommended_version else ""
- )
- message_template = message_template or _flatten_message(
- """
- DEPRECATION: Package {consumer_package} depends on
- {dependency_package}, currently installed at version
- {version_used_string}. Future updates to
- {consumer_package} will require {dependency_package} at
- version {minimum_fully_supported_version} or
- higher{recommendation}. Please ensure that either (a) your
- Python environment doesn't pin the version of
- {dependency_package}, so that updates to
- {consumer_package} can require the higher version, or (b)
- you manually update your Python environment to use at
- least version {minimum_fully_supported_version} of
- {dependency_package}.
- """
- )
- warnings.warn(
- message_template.format(
- consumer_import_package=consumer_import_package,
- dependency_import_package=dependency_import_package,
- consumer_distribution_package=consumer_distribution_package,
- dependency_distribution_package=dependency_distribution_package,
- dependency_package=dependency_package,
- consumer_package=consumer_package,
- minimum_fully_supported_version=minimum_fully_supported_version,
- recommendation=recommendation,
- version_used=dependency_version.version,
- version_used_string=dependency_version.version_string,
- ),
- FutureWarning,
- )
-
-
-def check_dependency_versions(
- consumer_import_package: str, *package_dependency_warnings: DependencyConstraint
-):
- """Bundle checks for all package dependencies.
-
- This function can be called by all consumers of google.api_core,
- to emit needed deprecation warnings for any of their
- dependencies. The dependencies to check can be passed as arguments, or if
- none are provided, it will default to the list in
- `_PACKAGE_DEPENDENCY_WARNINGS`.
-
- Args:
- consumer_import_package: The distribution name of the calling package, whose
- dependencies we're checking.
- *package_dependency_warnings: A variable number of DependencyConstraint
- objects, each specifying a dependency to check.
- """
- if not package_dependency_warnings:
- package_dependency_warnings = tuple(_PACKAGE_DEPENDENCY_WARNINGS)
- for package_info in package_dependency_warnings:
- warn_deprecation_for_versions_less_than(
- consumer_import_package,
- package_info.package_name,
- package_info.minimum_fully_supported_version,
- recommended_version=package_info.recommended_version,
- )
diff --git a/google/api_core/_python_version_support.py b/google/api_core/_python_version_support.py
deleted file mode 100644
index d0c0dfe..0000000
--- a/google/api_core/_python_version_support.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Code to check Python versions supported by Google Cloud Client Libraries."""
-
-import datetime
-import enum
-import logging
-import warnings
-import sys
-import textwrap
-from typing import Any, List, NamedTuple, Optional, Dict, Tuple
-
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class PythonVersionStatus(enum.Enum):
- """Support status of a Python version in this client library artifact release.
-
- "Support", in this context, means that this release of a client library
- artifact is configured to run on the currently configured version of
- Python.
- """
-
- PYTHON_VERSION_STATUS_UNSPECIFIED = "PYTHON_VERSION_STATUS_UNSPECIFIED"
-
- PYTHON_VERSION_SUPPORTED = "PYTHON_VERSION_SUPPORTED"
- """This Python version is fully supported, so the artifact running on this
- version will have all features and bug fixes."""
-
- PYTHON_VERSION_DEPRECATED = "PYTHON_VERSION_DEPRECATED"
- """This Python version is still supported, but support will end within a
- year. At that time, there will be no more releases for this artifact
- running under this Python version."""
-
- PYTHON_VERSION_EOL = "PYTHON_VERSION_EOL"
- """This Python version has reached its end of life in the Python community
- (see https://devguide.python.org/versions/), and this artifact will cease
- supporting this Python version within the next few releases."""
-
- PYTHON_VERSION_UNSUPPORTED = "PYTHON_VERSION_UNSUPPORTED"
- """This release of the client library artifact may not be the latest, since
- current releases no longer support this Python version."""
-
-
-class VersionInfo(NamedTuple):
- """Hold release and support date information for a Python version."""
-
- version: str
- python_beta: Optional[datetime.date]
- python_start: datetime.date
- python_eol: datetime.date
- gapic_start: Optional[datetime.date] = None # unused
- gapic_deprecation: Optional[datetime.date] = None
- gapic_end: Optional[datetime.date] = None
- dep_unpatchable_cve: Optional[datetime.date] = None # unused
-
-
-PYTHON_VERSIONS: List[VersionInfo] = [
- # Refer to https://devguide.python.org/versions/ and the PEPs linked therefrom.
- VersionInfo(
- version="3.7",
- python_beta=None,
- python_start=datetime.date(2018, 6, 27),
- python_eol=datetime.date(2023, 6, 27),
- ),
- VersionInfo(
- version="3.8",
- python_beta=None,
- python_start=datetime.date(2019, 10, 14),
- python_eol=datetime.date(2024, 10, 7),
- ),
- VersionInfo(
- version="3.9",
- python_beta=datetime.date(2020, 5, 18),
- python_start=datetime.date(2020, 10, 5),
- python_eol=datetime.date(2025, 10, 5),
- gapic_end=datetime.date(2025, 10, 5) + datetime.timedelta(days=90),
- ),
- VersionInfo(
- version="3.10",
- python_beta=datetime.date(2021, 5, 3),
- python_start=datetime.date(2021, 10, 4),
- python_eol=datetime.date(2026, 10, 4), # TODO: specify day when announced
- ),
- VersionInfo(
- version="3.11",
- python_beta=datetime.date(2022, 5, 8),
- python_start=datetime.date(2022, 10, 24),
- python_eol=datetime.date(2027, 10, 24), # TODO: specify day when announced
- ),
- VersionInfo(
- version="3.12",
- python_beta=datetime.date(2023, 5, 22),
- python_start=datetime.date(2023, 10, 2),
- python_eol=datetime.date(2028, 10, 2), # TODO: specify day when announced
- ),
- VersionInfo(
- version="3.13",
- python_beta=datetime.date(2024, 5, 8),
- python_start=datetime.date(2024, 10, 7),
- python_eol=datetime.date(2029, 10, 7), # TODO: specify day when announced
- ),
- VersionInfo(
- version="3.14",
- python_beta=datetime.date(2025, 5, 7),
- python_start=datetime.date(2025, 10, 7),
- python_eol=datetime.date(2030, 10, 7), # TODO: specify day when announced
- ),
-]
-
-PYTHON_VERSION_INFO: Dict[Tuple[int, int], VersionInfo] = {}
-for info in PYTHON_VERSIONS:
- major, minor = map(int, info.version.split("."))
- PYTHON_VERSION_INFO[(major, minor)] = info
-
-
-LOWEST_TRACKED_VERSION = min(PYTHON_VERSION_INFO.keys())
-_FAKE_PAST_DATE = datetime.date.min + datetime.timedelta(days=900)
-_FAKE_PAST_VERSION = VersionInfo(
- version="0.0",
- python_beta=_FAKE_PAST_DATE,
- python_start=_FAKE_PAST_DATE,
- python_eol=_FAKE_PAST_DATE,
-)
-_FAKE_FUTURE_DATE = datetime.date.max - datetime.timedelta(days=900)
-_FAKE_FUTURE_VERSION = VersionInfo(
- version="999.0",
- python_beta=_FAKE_FUTURE_DATE,
- python_start=_FAKE_FUTURE_DATE,
- python_eol=_FAKE_FUTURE_DATE,
-)
-DEPRECATION_WARNING_PERIOD = datetime.timedelta(days=365)
-EOL_GRACE_PERIOD = datetime.timedelta(weeks=1)
-
-
-def _flatten_message(text: str) -> str:
- """Dedent a multi-line string and flatten it into a single line."""
- return " ".join(textwrap.dedent(text).strip().split())
-
-
-# TODO(https://github.com/googleapis/python-api-core/issues/835):
-# Remove once we no longer support Python 3.9.
-# `importlib.metadata.packages_distributions()` is only supported in Python 3.10 and newer
-# https://docs.python.org/3/library/importlib.metadata.html#importlib.metadata.packages_distributions
-if sys.version_info < (3, 10):
-
- def _get_pypi_package_name(module_name): # pragma: NO COVER
- """Determine the PyPI package name for a given module name."""
- return None
-
-else:
- from importlib import metadata
-
- def _get_pypi_package_name(module_name):
- """Determine the PyPI package name for a given module name."""
- try:
- # Get the mapping of modules to distributions
- module_to_distributions = metadata.packages_distributions()
-
- # Check if the module is found in the mapping
- if module_name in module_to_distributions: # pragma: NO COVER
- # The value is a list of distribution names, take the first one
- return module_to_distributions[module_name][0]
- except Exception as e: # pragma: NO COVER
- _LOGGER.info(
- "An error occurred while determining PyPI package name for %s: %s",
- module_name,
- e,
- )
-
- return None
-
-
-def _get_distribution_and_import_packages(import_package: str) -> Tuple[str, Any]:
- """Return a pretty string with distribution & import package names."""
- distribution_package = _get_pypi_package_name(import_package)
- dependency_distribution_and_import_packages = (
- f"package {distribution_package} ({import_package})"
- if distribution_package
- else import_package
- )
- return dependency_distribution_and_import_packages, distribution_package
-
-
-def check_python_version(
- package: str = "this package", today: Optional[datetime.date] = None
-) -> PythonVersionStatus:
- """Check the running Python version and issue a support warning if needed.
-
- Args:
- today: The date to check against. Defaults to the current date.
-
- Returns:
- The support status of the current Python version.
- """
- today = today or datetime.date.today()
- package_label, _ = _get_distribution_and_import_packages(package)
-
- python_version = sys.version_info
- version_tuple = (python_version.major, python_version.minor)
- py_version_str = sys.version.split()[0]
-
- version_info = PYTHON_VERSION_INFO.get(version_tuple)
-
- if not version_info:
- if version_tuple < LOWEST_TRACKED_VERSION:
- version_info = _FAKE_PAST_VERSION
- else:
- version_info = _FAKE_FUTURE_VERSION
-
- gapic_deprecation = version_info.gapic_deprecation or (
- version_info.python_eol - DEPRECATION_WARNING_PERIOD
- )
- gapic_end = version_info.gapic_end or (version_info.python_eol + EOL_GRACE_PERIOD)
-
- def min_python(date: datetime.date) -> str:
- """Find the minimum supported Python version for a given date."""
- for version, info in sorted(PYTHON_VERSION_INFO.items()):
- if info.python_start <= date < info.python_eol:
- return f"{version[0]}.{version[1]}"
- return "at a currently supported version [https://devguide.python.org/versions]"
-
- if gapic_end < today:
- message = _flatten_message(
- f"""
- You are using a non-supported Python version ({py_version_str}).
- Google will not post any further updates to {package_label}
- supporting this Python version. Please upgrade to the latest Python
- version, or at least Python {min_python(today)}, and then update
- {package_label}.
- """
- )
- warnings.warn(message, FutureWarning)
- return PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED
-
- eol_date = version_info.python_eol + EOL_GRACE_PERIOD
- if eol_date <= today <= gapic_end:
- message = _flatten_message(
- f"""
- You are using a Python version ({py_version_str})
- past its end of life. Google will update {package_label}
- with critical bug fixes on a best-effort basis, but not
- with any other fixes or features. Please upgrade
- to the latest Python version, or at least Python
- {min_python(today)}, and then update {package_label}.
- """
- )
- warnings.warn(message, FutureWarning)
- return PythonVersionStatus.PYTHON_VERSION_EOL
-
- if gapic_deprecation <= today <= gapic_end:
- message = _flatten_message(
- f"""
- You are using a Python version ({py_version_str}) which Google will
- stop supporting in new releases of {package_label} once it reaches
- its end of life ({version_info.python_eol}). Please upgrade to the
- latest Python version, or at least Python
- {min_python(version_info.python_eol)}, to continue receiving updates
- for {package_label} past that date.
- """
- )
- warnings.warn(message, FutureWarning)
- return PythonVersionStatus.PYTHON_VERSION_DEPRECATED
-
- return PythonVersionStatus.PYTHON_VERSION_SUPPORTED
diff --git a/google/api_core/_rest_streaming_base.py b/google/api_core/_rest_streaming_base.py
deleted file mode 100644
index 3bc87a9..0000000
--- a/google/api_core/_rest_streaming_base.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for server-side streaming in REST."""
-
-from collections import deque
-import string
-from typing import Deque, Union
-import types
-
-import proto
-import google.protobuf.message
-from google.protobuf.json_format import Parse
-
-
-class BaseResponseIterator:
- """Base Iterator over REST API responses. This class should not be used directly.
-
- Args:
- response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
- class expected to be returned from an API.
-
- Raises:
- ValueError: If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
- """
-
- def __init__(
- self,
- response_message_cls: Union[proto.Message, google.protobuf.message.Message],
- ):
- self._response_message_cls = response_message_cls
- # Contains a list of JSON responses ready to be sent to user.
- self._ready_objs: Deque[str] = deque()
- # Current JSON response being built.
- self._obj = ""
- # Keeps track of the nesting level within a JSON object.
- self._level = 0
- # Keeps track whether HTTP response is currently sending values
- # inside of a string value.
- self._in_string = False
- # Whether an escape symbol "\" was encountered.
- self._escape_next = False
-
- self._grab = types.MethodType(self._create_grab(), self)
-
- def _process_chunk(self, chunk: str):
- if self._level == 0:
- if chunk[0] != "[":
- raise ValueError(
- "Can only parse array of JSON objects, instead got %s" % chunk
- )
- for char in chunk:
- if char == "{":
- if self._level == 1:
- # Level 1 corresponds to the outermost JSON object
- # (i.e. the one we care about).
- self._obj = ""
- if not self._in_string:
- self._level += 1
- self._obj += char
- elif char == "}":
- self._obj += char
- if not self._in_string:
- self._level -= 1
- if not self._in_string and self._level == 1:
- self._ready_objs.append(self._obj)
- elif char == '"':
- # Helps to deal with an escaped quotes inside of a string.
- if not self._escape_next:
- self._in_string = not self._in_string
- self._obj += char
- elif char in string.whitespace:
- if self._in_string:
- self._obj += char
- elif char == "[":
- if self._level == 0:
- self._level += 1
- else:
- self._obj += char
- elif char == "]":
- if self._level == 1:
- self._level -= 1
- else:
- self._obj += char
- else:
- self._obj += char
- self._escape_next = not self._escape_next if char == "\\" else False
-
- def _create_grab(self):
- if issubclass(self._response_message_cls, proto.Message):
-
- def grab(this):
- return this._response_message_cls.from_json(
- this._ready_objs.popleft(), ignore_unknown_fields=True
- )
-
- return grab
- elif issubclass(self._response_message_cls, google.protobuf.message.Message):
-
- def grab(this):
- return Parse(this._ready_objs.popleft(), this._response_message_cls())
-
- return grab
- else:
- raise ValueError(
- "Response message class must be a subclass of proto.Message or google.protobuf.message.Message."
- )
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
deleted file mode 100644
index 7f45c2a..0000000
--- a/google/api_core/bidi.py
+++ /dev/null
@@ -1,735 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for synchronous bidirectional streaming RPCs."""
-
-import collections
-import datetime
-import logging
-import queue as queue_module
-import threading
-import time
-
-from google.api_core import exceptions
-from google.api_core.bidi_base import BidiRpcBase
-
-_LOGGER = logging.getLogger(__name__)
-_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
-
-
-class _RequestQueueGenerator(object):
- """A helper for sending requests to a gRPC stream from a Queue.
-
- This generator takes requests off a given queue and yields them to gRPC.
-
- This helper is useful when you have an indeterminate, indefinite, or
- otherwise open-ended set of requests to send through a request-streaming
- (or bidirectional) RPC.
-
-
- Example::
-
- requests = request_queue_generator(q)
- call = stub.StreamingRequest(iter(requests))
- requests.call = call
-
- for response in call:
- print(response)
- q.put(...)
-
-
- Args:
- queue (queue_module.Queue): The request queue.
- period (float): The number of seconds to wait for items from the queue
- before checking if the RPC is cancelled. In practice, this
- determines the maximum amount of time the request consumption
- thread will live after the RPC is cancelled.
- initial_request (Union[protobuf.Message,
- Callable[None, protobuf.Message]]): The initial request to
- yield. This is done independently of the request queue to allow fo
- easily restarting streams that require some initial configuration
- request.
- """
-
- def __init__(self, queue, period=1, initial_request=None):
- self._queue = queue
- self._period = period
- self._initial_request = initial_request
- self.call = None
-
- def _is_active(self):
- # Note: there is a possibility that this starts *before* the call
- # property is set. So we have to check if self.call is set before
- # seeing if it's active. We need to return True if self.call is None.
- # See https://github.com/googleapis/python-api-core/issues/560.
- return self.call is None or self.call.is_active()
-
- def __iter__(self):
- # The reason this is necessary is because gRPC takes an iterator as the
- # request for request-streaming RPCs. gRPC consumes this iterator in
- # another thread to allow it to block while generating requests for
- # the stream. However, if the generator blocks indefinitely gRPC will
- # not be able to clean up the thread as it'll be blocked on
- # `next(iterator)` and not be able to check the channel status to stop
- # iterating. This helper mitigates that by waiting on the queue with
- # a timeout and checking the RPC state before yielding.
- #
- # Finally, it allows for retrying without swapping queues because if
- # it does pull an item off the queue when the RPC is inactive, it'll
- # immediately put it back and then exit. This is necessary because
- # yielding the item in this case will cause gRPC to discard it. In
- # practice, this means that the order of messages is not guaranteed.
- # If such a thing is necessary it would be easy to use a priority
- # queue.
- #
- # Note that it is possible to accomplish this behavior without
- # "spinning" (using a queue timeout). One possible way would be to use
- # more threads to multiplex the grpc end event with the queue, another
- # possible way is to use selectors and a custom event/queue object.
- # Both of these approaches are significant from an engineering
- # perspective for small benefit - the CPU consumed by spinning is
- # pretty minuscule.
-
- if self._initial_request is not None:
- if callable(self._initial_request):
- yield self._initial_request()
- else:
- yield self._initial_request
-
- while True:
- try:
- item = self._queue.get(timeout=self._period)
- except queue_module.Empty:
- if not self._is_active():
- _LOGGER.debug(
- "Empty queue and inactive call, exiting request " "generator."
- )
- return
- else:
- # call is still active, keep waiting for queue items.
- continue
-
- # The consumer explicitly sent "None", indicating that the request
- # should end.
- if item is None:
- _LOGGER.debug("Cleanly exiting request generator.")
- return
-
- if not self._is_active():
- # We have an item, but the call is closed. We should put the
- # item back on the queue so that the next call can consume it.
- self._queue.put(item)
- _LOGGER.debug(
- "Inactive call, replacing item on queue and exiting "
- "request generator."
- )
- return
-
- yield item
-
-
-class _Throttle(object):
- """A context manager limiting the total entries in a sliding time window.
-
- If more than ``access_limit`` attempts are made to enter the context manager
- instance in the last ``time window`` interval, the exceeding requests block
- until enough time elapses.
-
- The context manager instances are thread-safe and can be shared between
- multiple threads. If multiple requests are blocked and waiting to enter,
- the exact order in which they are allowed to proceed is not determined.
-
- Example::
-
- max_three_per_second = _Throttle(
- access_limit=3, time_window=datetime.timedelta(seconds=1)
- )
-
- for i in range(5):
- with max_three_per_second as time_waited:
- print("{}: Waited {} seconds to enter".format(i, time_waited))
-
- Args:
- access_limit (int): the maximum number of entries allowed in the time window
- time_window (datetime.timedelta): the width of the sliding time window
- """
-
- def __init__(self, access_limit, time_window):
- if access_limit < 1:
- raise ValueError("access_limit argument must be positive")
-
- if time_window <= datetime.timedelta(0):
- raise ValueError("time_window argument must be a positive timedelta")
-
- self._time_window = time_window
- self._access_limit = access_limit
- self._past_entries = collections.deque(
- maxlen=access_limit
- ) # least recent first
- self._entry_lock = threading.Lock()
-
- def __enter__(self):
- with self._entry_lock:
- cutoff_time = datetime.datetime.now() - self._time_window
-
- # drop the entries that are too old, as they are no longer relevant
- while self._past_entries and self._past_entries[0] < cutoff_time:
- self._past_entries.popleft()
-
- if len(self._past_entries) < self._access_limit:
- self._past_entries.append(datetime.datetime.now())
- return 0.0 # no waiting was needed
-
- to_wait = (self._past_entries[0] - cutoff_time).total_seconds()
- time.sleep(to_wait)
-
- self._past_entries.append(datetime.datetime.now())
- return to_wait
-
- def __exit__(self, *_):
- pass
-
- def __repr__(self):
- return "{}(access_limit={}, time_window={})".format(
- self.__class__.__name__, self._access_limit, repr(self._time_window)
- )
-
-
-class BidiRpc(BidiRpcBase):
- """A helper for consuming a bi-directional streaming RPC.
-
- This maps gRPC's built-in interface which uses a request iterator and a
- response iterator into a socket-like :func:`send` and :func:`recv`. This
- is a more useful pattern for long-running or asymmetric streams (streams
- where there is not a direct correlation between the requests and
- responses).
-
- Example::
-
- initial_request = example_pb2.StreamingRpcRequest(
- setting='example')
- rpc = BidiRpc(
- stub.StreamingRpc,
- initial_request=initial_request,
- metadata=[('name', 'value')]
- )
-
- rpc.open()
-
- while rpc.is_active():
- print(rpc.recv())
- rpc.send(example_pb2.StreamingRpcRequest(
- data='example'))
-
- rpc.close()
-
- This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`.
-
- Args:
- start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
- start the RPC.
- initial_request (Union[protobuf.Message,
- Callable[None, protobuf.Message]]): The initial request to
- yield. This is useful if an initial request is needed to start the
- stream.
- metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
- the request.
- """
-
- def _create_queue(self):
- """Create a queue for requests."""
- return queue_module.Queue()
-
- def open(self):
- """Opens the stream."""
- if self.is_active:
- raise ValueError("Cannot open an already open stream.")
-
- request_generator = _RequestQueueGenerator(
- self._request_queue, initial_request=self._initial_request
- )
- try:
- call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
- except exceptions.GoogleAPICallError as exc:
- # The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
- # available from the ``response`` property on the mapped exception.
- self._on_call_done(exc.response)
- raise
-
- request_generator.call = call
-
- # TODO: api_core should expose the future interface for wrapped
- # callables as well.
- if hasattr(call, "_wrapped"): # pragma: NO COVER
- call._wrapped.add_done_callback(self._on_call_done)
- else:
- call.add_done_callback(self._on_call_done)
-
- self._request_generator = request_generator
- self.call = call
-
- def close(self):
- """Closes the stream."""
- if self.call is not None:
- self.call.cancel()
-
- # Put None in request queue to signal termination.
- self._request_queue.put(None)
- self._request_generator = None
- self._initial_request = None
- self._callbacks = []
- # Don't set self.call to None. Keep it around so that send/recv can
- # raise the error.
-
- def send(self, request):
- """Queue a message to be sent on the stream.
-
- Send is non-blocking.
-
- If the underlying RPC has been closed, this will raise.
-
- Args:
- request (protobuf.Message): The request to send.
- """
- if self.call is None:
- raise ValueError("Cannot send on an RPC stream that has never been opened.")
-
- # Don't use self.is_active(), as ResumableBidiRpc will overload it
- # to mean something semantically different.
- if self.call.is_active():
- self._request_queue.put(request)
- else:
- # calling next should cause the call to raise.
- next(self.call)
-
- def recv(self):
- """Wait for a message to be returned from the stream.
-
- Recv is blocking.
-
- If the underlying RPC has been closed, this will raise.
-
- Returns:
- protobuf.Message: The received message.
- """
- if self.call is None:
- raise ValueError("Cannot recv on an RPC stream that has never been opened.")
-
- return next(self.call)
-
- @property
- def is_active(self):
- """True if this stream is currently open and active."""
- return self.call is not None and self.call.is_active()
-
-
-def _never_terminate(future_or_error):
- """By default, no errors cause BiDi termination."""
- return False
-
-
-class ResumableBidiRpc(BidiRpc):
- """A :class:`BidiRpc` that can automatically resume the stream on errors.
-
- It uses the ``should_recover`` arg to determine if it should re-establish
- the stream on error.
-
- Example::
-
- def should_recover(exc):
- return (
- isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNAVAILABLE)
-
- initial_request = example_pb2.StreamingRpcRequest(
- setting='example')
-
- metadata = [('header_name', 'value')]
-
- rpc = ResumableBidiRpc(
- stub.StreamingRpc,
- should_recover=should_recover,
- initial_request=initial_request,
- metadata=metadata
- )
-
- rpc.open()
-
- while rpc.is_active():
- print(rpc.recv())
- rpc.send(example_pb2.StreamingRpcRequest(
- data='example'))
-
- Args:
- start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
- start the RPC.
- initial_request (Union[protobuf.Message,
- Callable[None, protobuf.Message]]): The initial request to
- yield. This is useful if an initial request is needed to start the
- stream.
- should_recover (Callable[[Exception], bool]): A function that returns
- True if the stream should be recovered. This will be called
- whenever an error is encountered on the stream.
- should_terminate (Callable[[Exception], bool]): A function that returns
- True if the stream should be terminated. This will be called
- whenever an error is encountered on the stream.
- metadata Sequence[Tuple(str, str)]: RPC metadata to include in
- the request.
- throttle_reopen (bool): If ``True``, throttling will be applied to
- stream reopen calls. Defaults to ``False``.
- """
-
- def __init__(
- self,
- start_rpc,
- should_recover,
- should_terminate=_never_terminate,
- initial_request=None,
- metadata=None,
- throttle_reopen=False,
- ):
- super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
- self._should_recover = should_recover
- self._should_terminate = should_terminate
- self._operational_lock = threading.RLock()
- self._finalized = False
- self._finalize_lock = threading.Lock()
-
- if throttle_reopen:
- self._reopen_throttle = _Throttle(
- access_limit=5, time_window=datetime.timedelta(seconds=10)
- )
- else:
- self._reopen_throttle = None
-
- def _finalize(self, result):
- with self._finalize_lock:
- if self._finalized:
- return
-
- for callback in self._callbacks:
- callback(result)
-
- self._finalized = True
-
- def _on_call_done(self, future):
- # Unlike the base class, we only execute the callbacks on a terminal
- # error, not for errors that we can recover from. Note that grpc's
- # "future" here is also a grpc.RpcError.
- with self._operational_lock:
- if self._should_terminate(future):
- self._finalize(future)
- elif not self._should_recover(future):
- self._finalize(future)
- else:
- _LOGGER.debug("Re-opening stream from gRPC callback.")
- self._reopen()
-
- def _reopen(self):
- with self._operational_lock:
- # Another thread already managed to re-open this stream.
- if self.call is not None and self.call.is_active():
- _LOGGER.debug("Stream was already re-established.")
- return
-
- self.call = None
- # Request generator should exit cleanly since the RPC its bound to
- # has exited.
- self._request_generator = None
-
- # Note: we do not currently do any sort of backoff here. The
- # assumption is that re-establishing the stream under normal
- # circumstances will happen in intervals greater than 60s.
- # However, it is possible in a degenerative case that the server
- # closes the stream rapidly which would lead to thrashing here,
- # but hopefully in those cases the server would return a non-
- # retryable error.
-
- try:
- if self._reopen_throttle:
- with self._reopen_throttle:
- self.open()
- else:
- self.open()
- # If re-opening or re-calling the method fails for any reason,
- # consider it a terminal error and finalize the stream.
- except Exception as exc:
- _LOGGER.debug("Failed to re-open stream due to %s", exc)
- self._finalize(exc)
- raise
-
- _LOGGER.info("Re-established stream")
-
- def _recoverable(self, method, *args, **kwargs):
- """Wraps a method to recover the stream and retry on error.
-
- If a retryable error occurs while making the call, then the stream will
- be re-opened and the method will be retried. This happens indefinitely
- so long as the error is a retryable one. If an error occurs while
- re-opening the stream, then this method will raise immediately and
- trigger finalization of this object.
-
- Args:
- method (Callable[..., Any]): The method to call.
- args: The args to pass to the method.
- kwargs: The kwargs to pass to the method.
- """
- while True:
- try:
- return method(*args, **kwargs)
-
- except Exception as exc:
- with self._operational_lock:
- _LOGGER.debug("Call to retryable %r caused %s.", method, exc)
-
- if self._should_terminate(exc):
- self.close()
- _LOGGER.debug("Terminating %r due to %s.", method, exc)
- self._finalize(exc)
- break
-
- if not self._should_recover(exc):
- self.close()
- _LOGGER.debug("Not retrying %r due to %s.", method, exc)
- self._finalize(exc)
- raise exc
-
- _LOGGER.debug("Re-opening stream from retryable %r.", method)
- self._reopen()
-
- def _send(self, request):
- # Grab a reference to the RPC call. Because another thread (notably
- # the gRPC error thread) can modify self.call (by invoking reopen),
- # we should ensure our reference can not change underneath us.
- # If self.call is modified (such as replaced with a new RPC call) then
- # this will use the "old" RPC, which should result in the same
- # exception passed into gRPC's error handler being raised here, which
- # will be handled by the usual error handling in retryable.
- with self._operational_lock:
- call = self.call
-
- if call is None:
- raise ValueError("Cannot send on an RPC that has never been opened.")
-
- # Don't use self.is_active(), as ResumableBidiRpc will overload it
- # to mean something semantically different.
- if call.is_active():
- self._request_queue.put(request)
- pass
- else:
- # calling next should cause the call to raise.
- next(call)
-
- def send(self, request):
- return self._recoverable(self._send, request)
-
- def _recv(self):
- with self._operational_lock:
- call = self.call
-
- if call is None:
- raise ValueError("Cannot recv on an RPC that has never been opened.")
-
- return next(call)
-
- def recv(self):
- return self._recoverable(self._recv)
-
- def close(self):
- self._finalize(None)
- super(ResumableBidiRpc, self).close()
-
- @property
- def is_active(self):
- """bool: True if this stream is currently open and active."""
- # Use the operational lock. It's entirely possible for something
- # to check the active state *while* the RPC is being retried.
- # Also, use finalized to track the actual terminal state here.
- # This is because if the stream is re-established by the gRPC thread
- # it's technically possible to check this between when gRPC marks the
- # RPC as inactive and when gRPC executes our callback that re-opens
- # the stream.
- with self._operational_lock:
- return self.call is not None and not self._finalized
-
-
-class BackgroundConsumer(object):
- """A bi-directional stream consumer that runs in a separate thread.
-
- This maps the consumption of a stream into a callback-based model. It also
- provides :func:`pause` and :func:`resume` to allow for flow-control.
-
- Example::
-
- def should_recover(exc):
- return (
- isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNAVAILABLE)
-
- initial_request = example_pb2.StreamingRpcRequest(
- setting='example')
-
- rpc = ResumeableBidiRpc(
- stub.StreamingRpc,
- initial_request=initial_request,
- should_recover=should_recover)
-
- def on_response(response):
- print(response)
-
- consumer = BackgroundConsumer(rpc, on_response)
- consumer.start()
-
- Note that error handling *must* be done by using the provided
- ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit
- whenever the RPC itself exits and will not provide any error details.
-
- Args:
- bidi_rpc (BidiRpc): The RPC to consume. Should not have been
- ``open()``ed yet.
- on_response (Callable[[protobuf.Message], None]): The callback to
- be called for every response on the stream.
- on_fatal_exception (Callable[[Exception], None]): The callback to
- be called on fatal errors during consumption. Default None.
- """
-
- def __init__(self, bidi_rpc, on_response, on_fatal_exception=None):
- self._bidi_rpc = bidi_rpc
- self._on_response = on_response
- self._paused = False
- self._on_fatal_exception = on_fatal_exception
- self._wake = threading.Condition()
- self._thread = None
- self._operational_lock = threading.Lock()
-
- def _on_call_done(self, future):
- # Resume the thread if it's paused, this prevents blocking forever
- # when the RPC has terminated.
- self.resume()
-
- def _thread_main(self, ready):
- try:
- ready.set()
- self._bidi_rpc.add_done_callback(self._on_call_done)
- self._bidi_rpc.open()
-
- while self._bidi_rpc.is_active:
- # Do not allow the paused status to change at all during this
- # section. There is a condition where we could be resumed
- # between checking if we are paused and calling wake.wait(),
- # which means that we will miss the notification to wake up
- # (oops!) and wait for a notification that will never come.
- # Keeping the lock throughout avoids that.
- # In the future, we could use `Condition.wait_for` if we drop
- # Python 2.7.
- # See: https://github.com/googleapis/python-api-core/issues/211
- with self._wake:
- while self._paused:
- _LOGGER.debug("paused, waiting for waking.")
- self._wake.wait()
- _LOGGER.debug("woken.")
-
- _LOGGER.debug("waiting for recv.")
- response = self._bidi_rpc.recv()
- _LOGGER.debug("recved response.")
- if self._on_response is not None:
- self._on_response(response)
-
- except exceptions.GoogleAPICallError as exc:
- _LOGGER.debug(
- "%s caught error %s and will exit. Generally this is due to "
- "the RPC itself being cancelled and the error will be "
- "surfaced to the calling code.",
- _BIDIRECTIONAL_CONSUMER_NAME,
- exc,
- exc_info=True,
- )
- if self._on_fatal_exception is not None:
- self._on_fatal_exception(exc)
-
- except Exception as exc:
- _LOGGER.exception(
- "%s caught unexpected exception %s and will exit.",
- _BIDIRECTIONAL_CONSUMER_NAME,
- exc,
- )
- if self._on_fatal_exception is not None:
- self._on_fatal_exception(exc)
-
- _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
-
- def start(self):
- """Start the background thread and begin consuming the thread."""
- with self._operational_lock:
- ready = threading.Event()
- thread = threading.Thread(
- name=_BIDIRECTIONAL_CONSUMER_NAME,
- target=self._thread_main,
- args=(ready,),
- daemon=True,
- )
- thread.start()
- # Other parts of the code rely on `thread.is_alive` which
- # isn't sufficient to know if a thread is active, just that it may
- # soon be active. This can cause races. Further protect
- # against races by using a ready event and wait on it to be set.
- ready.wait()
- self._thread = thread
- _LOGGER.debug("Started helper thread %s", thread.name)
-
- def stop(self):
- """Stop consuming the stream and shutdown the background thread.
-
- NOTE: Cannot be called within `_thread_main`, since it is not
- possible to join a thread to itself.
- """
- with self._operational_lock:
- self._bidi_rpc.close()
-
- if self._thread is not None:
- # Resume the thread to wake it up in case it is sleeping.
- self.resume()
- # The daemonized thread may itself block, so don't wait
- # for it longer than a second.
- self._thread.join(1.0)
- if self._thread.is_alive(): # pragma: NO COVER
- _LOGGER.warning("Background thread did not exit.")
-
- self._thread = None
- self._on_response = None
- self._on_fatal_exception = None
-
- @property
- def is_active(self):
- """bool: True if the background thread is active."""
- return self._thread is not None and self._thread.is_alive()
-
- def pause(self):
- """Pauses the response stream.
-
- This does *not* pause the request stream.
- """
- with self._wake:
- self._paused = True
-
- def resume(self):
- """Resumes the response stream."""
- with self._wake:
- self._paused = False
- self._wake.notify_all()
-
- @property
- def is_paused(self):
- """bool: True if the response stream is paused."""
- return self._paused
diff --git a/google/api_core/bidi_async.py b/google/api_core/bidi_async.py
deleted file mode 100644
index 3770f69..0000000
--- a/google/api_core/bidi_async.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2025, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Asynchronous bi-directional streaming RPC helpers."""
-
-import asyncio
-import logging
-from typing import Callable, Optional, Union
-
-from grpc import aio
-
-from google.api_core import exceptions
-from google.api_core.bidi_base import BidiRpcBase
-
-from google.protobuf.message import Message as ProtobufMessage
-
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class _AsyncRequestQueueGenerator:
- """_AsyncRequestQueueGenerator is a helper class for sending asynchronous
- requests to a gRPC stream from a Queue.
-
- This generator takes asynchronous requests off a given `asyncio.Queue` and
- yields them to gRPC.
-
- It's useful when you have an indeterminate, indefinite, or otherwise
- open-ended set of requests to send through a request-streaming (or
- bidirectional) RPC.
-
- Example::
-
- requests = _AsyncRequestQueueGenerator(q)
- call = await stub.StreamingRequest(requests)
- requests.call = call
-
- async for response in call:
- print(response)
- await q.put(...)
-
- Args:
- queue (asyncio.Queue): The request queue.
- initial_request (Union[ProtobufMessage,
- Callable[[], ProtobufMessage]]): The initial request to
- yield. This is done independently of the request queue to allow for
- easily restarting streams that require some initial configuration
- request.
- """
-
- def __init__(
- self,
- queue: asyncio.Queue,
- initial_request: Optional[
- Union[ProtobufMessage, Callable[[], ProtobufMessage]]
- ] = None,
- ) -> None:
- self._queue = queue
- self._initial_request = initial_request
- self.call: Optional[aio.Call] = None
-
- def _is_active(self) -> bool:
- """Returns true if the call is not set or not completed."""
- # Note: there is a possibility that this starts *before* the call
- # property is set. So we have to check if self.call is set before
- # seeing if it's active. We need to return True if self.call is None.
- # See https://github.com/googleapis/python-api-core/issues/560.
- return self.call is None or not self.call.done()
-
- async def __aiter__(self):
- # The reason this is necessary is because it lets the user have
- # control on when they would want to send requests proto messages
- # instead of sending all of them initially.
- #
- # This is achieved via asynchronous queue (asyncio.Queue),
- # gRPC awaits until there's a message in the queue.
- #
- # Finally, it allows for retrying without swapping queues because if
- # it does pull an item off the queue when the RPC is inactive, it'll
- # immediately put it back and then exit. This is necessary because
- # yielding the item in this case will cause gRPC to discard it. In
- # practice, this means that the order of messages is not guaranteed.
- # If preserving order is necessary it would be easy to use a priority
- # queue.
- if self._initial_request is not None:
- if callable(self._initial_request):
- yield self._initial_request()
- else:
- yield self._initial_request
-
- while True:
- item = await self._queue.get()
-
- # The consumer explicitly sent "None", indicating that the request
- # should end.
- if item is None:
- _LOGGER.debug("Cleanly exiting request generator.")
- return
-
- if not self._is_active():
- # We have an item, but the call is closed. We should put the
- # item back on the queue so that the next call can consume it.
- await self._queue.put(item)
- _LOGGER.debug(
- "Inactive call, replacing item on queue and exiting "
- "request generator."
- )
- return
-
- yield item
-
-
-class AsyncBidiRpc(BidiRpcBase):
- """A helper for consuming a async bi-directional streaming RPC.
-
- This maps gRPC's built-in interface which uses a request iterator and a
- response iterator into a socket-like :func:`send` and :func:`recv`. This
- is a more useful pattern for long-running or asymmetric streams (streams
- where there is not a direct correlation between the requests and
- responses).
-
- Example::
-
- initial_request = example_pb2.StreamingRpcRequest(
- setting='example')
- rpc = AsyncBidiRpc(
- stub.StreamingRpc,
- initial_request=initial_request,
- metadata=[('name', 'value')]
- )
-
- await rpc.open()
-
- while rpc.is_active:
- print(await rpc.recv())
- await rpc.send(example_pb2.StreamingRpcRequest(
- data='example'))
-
- await rpc.close()
-
- This does *not* retry the stream on errors.
-
- Args:
- start_rpc (grpc.aio.StreamStreamMultiCallable): The gRPC method used to
- start the RPC.
- initial_request (Union[ProtobufMessage,
- Callable[[], ProtobufMessage]]): The initial request to
- yield. This is useful if an initial request is needed to start the
- stream.
- metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
- the request.
- """
-
- def _create_queue(self) -> asyncio.Queue:
- """Create a queue for requests."""
- return asyncio.Queue()
-
- async def open(self) -> None:
- """Opens the stream."""
- if self.is_active:
- raise ValueError("Cannot open an already open stream.")
-
- request_generator = _AsyncRequestQueueGenerator(
- self._request_queue, initial_request=self._initial_request
- )
- try:
- call = await self._start_rpc(request_generator, metadata=self._rpc_metadata)
- except exceptions.GoogleAPICallError as exc:
- # The original `grpc.aio.AioRpcError` (which is usually also a
- # `grpc.aio.Call`) is available from the ``response`` property on
- # the mapped exception.
- self._on_call_done(exc.response)
- raise
-
- request_generator.call = call
-
- # TODO: api_core should expose the future interface for wrapped
- # callables as well.
- if hasattr(call, "_wrapped"): # pragma: NO COVER
- call._wrapped.add_done_callback(self._on_call_done)
- else:
- call.add_done_callback(self._on_call_done)
-
- self._request_generator = request_generator
- self.call = call
-
- async def close(self) -> None:
- """Closes the stream."""
- if self.call is not None:
- self.call.cancel()
-
- # Put None in request queue to signal termination.
- await self._request_queue.put(None)
- self._request_generator = None
- self._initial_request = None
- self._callbacks = []
- # Don't set self.call to None. Keep it around so that send/recv can
- # raise the error.
-
- async def send(self, request: ProtobufMessage) -> None:
- """Queue a message to be sent on the stream.
-
- If the underlying RPC has been closed, this will raise.
-
- Args:
- request (ProtobufMessage): The request to send.
- """
- if self.call is None:
- raise ValueError("Cannot send on an RPC stream that has never been opened.")
-
- if not self.call.done():
- await self._request_queue.put(request)
- else:
- # calling read should cause the call to raise.
- await self.call.read()
-
- async def recv(self) -> ProtobufMessage:
- """Wait for a message to be returned from the stream.
-
- If the underlying RPC has been closed, this will raise.
-
- Returns:
- ProtobufMessage: The received message.
- """
- if self.call is None:
- raise ValueError("Cannot recv on an RPC stream that has never been opened.")
-
- return await self.call.read()
-
- @property
- def is_active(self) -> bool:
- """Whether the stream is currently open and active."""
- return self.call is not None and not self.call.done()
diff --git a/google/api_core/bidi_base.py b/google/api_core/bidi_base.py
deleted file mode 100644
index 9288fda..0000000
--- a/google/api_core/bidi_base.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright 2025, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# You may obtain a copy of the License at
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Base class for bi-directional streaming RPC helpers."""
-
-
-class BidiRpcBase:
- """A base class for consuming a bi-directional streaming RPC.
-
- This maps gRPC's built-in interface which uses a request iterator and a
- response iterator into a socket-like :func:`send` and :func:`recv`. This
- is a more useful pattern for long-running or asymmetric streams (streams
- where there is not a direct correlation between the requests and
- responses).
-
- This does *not* retry the stream on errors.
-
- Args:
- start_rpc (Union[grpc.StreamStreamMultiCallable,
- grpc.aio.StreamStreamMultiCallable]): The gRPC method used
- to start the RPC.
- initial_request (Union[protobuf.Message,
- Callable[[], protobuf.Message]]): The initial request to
- yield. This is useful if an initial request is needed to start the
- stream.
- metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
- the request.
- """
-
- def __init__(self, start_rpc, initial_request=None, metadata=None):
- self._start_rpc = start_rpc
- self._initial_request = initial_request
- self._rpc_metadata = metadata
- self._request_queue = self._create_queue()
- self._request_generator = None
- self._callbacks = []
- self.call = None
-
- def _create_queue(self):
- """Create a queue for requests."""
- raise NotImplementedError("`_create_queue` is not implemented.")
-
- def add_done_callback(self, callback):
- """Adds a callback that will be called when the RPC terminates.
-
- This occurs when the RPC errors or is successfully terminated.
-
- Args:
- callback (Union[Callable[[grpc.Future], None], Callable[[Any], None]]):
- The callback to execute after gRPC call completed (success or
- failure).
-
- For sync streaming gRPC: Callable[[grpc.Future], None]
-
- For async streaming gRPC: Callable[[Any], None]
- """
- self._callbacks.append(callback)
-
- def _on_call_done(self, future):
- # This occurs when the RPC errors or is successfully terminated.
- # Note that grpc's "future" here can also be a grpc.RpcError.
- # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331
- # that `grpc.RpcError` is also `grpc.Call`.
- # for asynchronous gRPC call it would be `grpc.aio.AioRpcError`
-
- # Note: sync callbacks can be limiting for async code, because you can't
- # await anything in a sync callback.
- for callback in self._callbacks:
- callback(future)
-
- @property
- def is_active(self):
- """True if the gRPC call is not done yet."""
- raise NotImplementedError("`is_active` is not implemented.")
-
- @property
- def pending_requests(self):
- """Estimate of the number of queued requests."""
- return self._request_queue.qsize()
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
deleted file mode 100644
index f0678d2..0000000
--- a/google/api_core/client_info.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for providing client information.
-
-Client information is used to send information about the calling client,
-such as the library and Python version, to API services.
-"""
-
-import platform
-from typing import Union
-
-from google.api_core import version as api_core_version
-
-_PY_VERSION = platform.python_version()
-_API_CORE_VERSION = api_core_version.__version__
-
-_GRPC_VERSION: Union[str, None]
-
-try:
- import grpc
-
- _GRPC_VERSION = grpc.__version__
-except ImportError: # pragma: NO COVER
- _GRPC_VERSION = None
-
-
-class ClientInfo(object):
- """Client information used to generate a user-agent for API calls.
-
- This user-agent information is sent along with API calls to allow the
- receiving service to do analytics on which versions of Python and Google
- libraries are being used.
-
- Args:
- python_version (str): The Python interpreter version, for example,
- ``'3.9.6'``.
- grpc_version (Optional[str]): The gRPC library version.
- api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The version of gapic-generated client
- library, if the library was generated by gapic.
- client_library_version (Optional[str]): The version of the client
- library, generally used if the client library was not generated
- by gapic or if additional functionality was built on top of
- a gapic client library.
- user_agent (Optional[str]): Prefix to the user agent header. This is
- used to supply information such as application name or partner tool.
- Recommended format: ``application-or-tool-ID/major.minor.version``.
- rest_version (Optional[str]): A string with labeled versions of the
- dependencies used for REST transport.
- protobuf_runtime_version (Optional[str]): The protobuf runtime version.
- """
-
- def __init__(
- self,
- python_version=_PY_VERSION,
- grpc_version=_GRPC_VERSION,
- api_core_version=_API_CORE_VERSION,
- gapic_version=None,
- client_library_version=None,
- user_agent=None,
- rest_version=None,
- protobuf_runtime_version=None,
- ):
- self.python_version = python_version
- self.grpc_version = grpc_version
- self.api_core_version = api_core_version
- self.gapic_version = gapic_version
- self.client_library_version = client_library_version
- self.user_agent = user_agent
- self.rest_version = rest_version
- self.protobuf_runtime_version = protobuf_runtime_version
-
- def to_user_agent(self):
- """Returns the user-agent string for this client info."""
-
- # Note: the order here is important as the internal metrics system
- # expects these items to be in specific locations.
- ua = ""
-
- if self.user_agent is not None:
- ua += "{user_agent} "
-
- ua += "gl-python/{python_version} "
-
- if self.grpc_version is not None:
- ua += "grpc/{grpc_version} "
-
- if self.rest_version is not None:
- ua += "rest/{rest_version} "
-
- ua += "gax/{api_core_version} "
-
- if self.gapic_version is not None:
- ua += "gapic/{gapic_version} "
-
- if self.client_library_version is not None:
- ua += "gccl/{client_library_version} "
-
- if self.protobuf_runtime_version is not None:
- ua += "pb/{protobuf_runtime_version} "
-
- return ua.format(**self.__dict__).strip()
diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py
deleted file mode 100644
index 837e3e0..0000000
--- a/google/api_core/client_logging.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import logging
-import json
-import os
-
-from typing import List, Optional
-
-_LOGGING_INITIALIZED = False
-_BASE_LOGGER_NAME = "google"
-
-# Fields to be included in the StructuredLogFormatter.
-#
-# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields.
-_recognized_logging_fields = [
- "httpRequest",
- "rpcName",
- "serviceName",
- "credentialsType",
- "credentialsInfo",
- "universeDomain",
- "request",
- "response",
- "metadata",
- "retryAttempt",
- "httpResponse",
-] # Additional fields to be Logged.
-
-
-def logger_configured(logger) -> bool:
- """Determines whether `logger` has non-default configuration
-
- Args:
- logger: The logger to check.
-
- Returns:
- bool: Whether the logger has any non-default configuration.
- """
- return (
- logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate
- )
-
-
-def initialize_logging():
- """Initializes "google" loggers, partly based on the environment variable
-
- Initializes the "google" logger and any loggers (at the "google"
- level or lower) specified by the environment variable
- GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers
- were previously configured. If any such loggers (including the
- "google" logger) are initialized, they are set to NOT propagate
- log events up to their parent loggers.
-
- This initialization is executed only once, and hence the
- environment variable is only processed the first time this
- function is called.
- """
- global _LOGGING_INITIALIZED
- if _LOGGING_INITIALIZED:
- return
- scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "")
- setup_logging(scopes)
- _LOGGING_INITIALIZED = True
-
-
-def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]:
- """Returns a list of logger names.
-
- Splits the single string of comma-separated logger names into a list of individual logger name strings.
-
- Args:
- scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
-
- Returns:
- A list of all the logger names in scopes.
- """
- if not scopes:
- return []
- # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace.
- # TODO(b/380481951): Support logging multiple scopes.
- # TODO(b/380483756): Raise or log a warning for an invalid scope.
- namespaces = [scopes]
- return namespaces
-
-
-def configure_defaults(logger):
- """Configures `logger` to emit structured info to stdout."""
- if not logger_configured(logger):
- console_handler = logging.StreamHandler()
- logger.setLevel("DEBUG")
- logger.propagate = False
- formatter = StructuredLogFormatter()
- console_handler.setFormatter(formatter)
- logger.addHandler(console_handler)
-
-
-def setup_logging(scopes: str = ""):
- """Sets up logging for the specified `scopes`.
-
- If the loggers specified in `scopes` have not been previously
- configured, this will configure them to emit structured log
- entries to stdout, and to not propagate their log events to their
- parent loggers. Additionally, if the "google" logger (whether it
- was specified in `scopes` or not) was not previously configured,
- it will also configure it to not propagate log events to the root
- logger.
-
- Args:
- scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
-
- """
-
- # only returns valid logger scopes (namespaces)
- # this list has at most one element.
- logger_names = parse_logging_scopes(scopes)
-
- for namespace in logger_names:
- # This will either create a module level logger or get the reference of the base logger instantiated above.
- logger = logging.getLogger(namespace)
-
- # Configure default settings.
- configure_defaults(logger)
-
- # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes.
- base_logger = logging.getLogger(_BASE_LOGGER_NAME)
- if not logger_configured(base_logger):
- base_logger.propagate = False
-
-
-# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation.
-class StructuredLogFormatter(logging.Formatter):
- # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as
- # function name, file name, and line no. appear in a log output.
- def format(self, record: logging.LogRecord):
- log_obj = {
- "timestamp": self.formatTime(record),
- "severity": record.levelname,
- "name": record.name,
- "message": record.getMessage(),
- }
-
- for field_name in _recognized_logging_fields:
- value = getattr(record, field_name, None)
- if value is not None:
- log_obj[field_name] = value
- return json.dumps(log_obj)
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
deleted file mode 100644
index 30bff48..0000000
--- a/google/api_core/client_options.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Client options class.
-
-Client options provide a consistent interface for user options to be defined
-across clients.
-
-You can pass a client options object to a client.
-
-.. code-block:: python
-
- from google.api_core.client_options import ClientOptions
- from google.cloud.vision_v1 import ImageAnnotatorClient
-
- def get_client_cert():
- # code to load client certificate and private key.
- return client_cert_bytes, client_private_key_bytes
-
- options = ClientOptions(api_endpoint="foo.googleapis.com",
- client_cert_source=get_client_cert)
-
- client = ImageAnnotatorClient(client_options=options)
-
-You can also pass a mapping object.
-
-.. code-block:: python
-
- from google.cloud.vision_v1 import ImageAnnotatorClient
-
- client = ImageAnnotatorClient(
- client_options={
- "api_endpoint": "foo.googleapis.com",
- "client_cert_source" : get_client_cert
- })
-
-
-"""
-
-from typing import Callable, Mapping, Optional, Sequence, Tuple
-import warnings
-
-from google.api_core import general_helpers
-
-
-class ClientOptions(object):
- """Client Options used to set options on clients.
-
- Args:
- api_endpoint (Optional[str]): The desired API endpoint, e.g.,
- compute.googleapis.com
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback
- which returns client certificate bytes and private key bytes both in
- PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
- are mutually exclusive.
- client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]):
- A callback which returns client certificate file path, encrypted
- private key file path, and the passphrase bytes.``client_cert_source``
- and ``client_encrypted_cert_source`` are mutually exclusive.
- quota_project_id (Optional[str]): A project name that a client's
- quota belongs to.
- credentials_file (Optional[str]): Deprecated. A path to a file storing credentials.
- ``credentials_file` and ``api_key`` are mutually exclusive. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- scopes (Optional[Sequence[str]]): OAuth access token override scopes.
- api_key (Optional[str]): Google API key. ``credentials_file`` and
- ``api_key`` are mutually exclusive.
- api_audience (Optional[str]): The intended audience for the API calls
- to the service that will be set when using certain 3rd party
- authentication flows. Audience is typically a resource identifier.
- If not set, the service endpoint value will be used as a default.
- An example of a valid ``api_audience`` is: "https://language.googleapis.com".
- universe_domain (Optional[str]): The desired universe domain. This must match
- the one in credentials. If not set, the default universe domain is
- `googleapis.com`. If both `api_endpoint` and `universe_domain` are set,
- then `api_endpoint` is used as the service endpoint. If `api_endpoint` is
- not specified, the format will be `{service}.{universe_domain}`.
-
- Raises:
- ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
- are provided, or both ``credentials_file`` and ``api_key`` are provided.
- """
-
- def __init__(
- self,
- api_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- client_encrypted_cert_source: Optional[
- Callable[[], Tuple[str, str, bytes]]
- ] = None,
- quota_project_id: Optional[str] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- api_key: Optional[str] = None,
- api_audience: Optional[str] = None,
- universe_domain: Optional[str] = None,
- ):
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- if client_cert_source and client_encrypted_cert_source:
- raise ValueError(
- "client_cert_source and client_encrypted_cert_source are mutually exclusive"
- )
- if api_key and credentials_file:
- raise ValueError("api_key and credentials_file are mutually exclusive")
- self.api_endpoint = api_endpoint
- self.client_cert_source = client_cert_source
- self.client_encrypted_cert_source = client_encrypted_cert_source
- self.quota_project_id = quota_project_id
- self.credentials_file = credentials_file
- self.scopes = scopes
- self.api_key = api_key
- self.api_audience = api_audience
- self.universe_domain = universe_domain
-
- def __repr__(self) -> str:
- return "ClientOptions: " + repr(self.__dict__)
-
-
-def from_dict(options: Mapping[str, object]) -> ClientOptions:
- """Construct a client options object from a mapping object.
-
- Args:
- options (collections.abc.Mapping): A mapping object with client options.
- See the docstring for ClientOptions for details on valid arguments.
- """
-
- client_options = ClientOptions()
-
- for key, value in options.items():
- if hasattr(client_options, key):
- setattr(client_options, key, value)
- else:
- raise ValueError("ClientOptions does not accept an option '" + key + "'")
-
- return client_options
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
deleted file mode 100644
index c379230..0000000
--- a/google/api_core/datetime_helpers.py
+++ /dev/null
@@ -1,298 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for :mod:`datetime`."""
-
-import calendar
-import datetime
-import re
-
-from google.protobuf import timestamp_pb2
-
-
-_UTC_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
-_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
-_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
-# datetime.strptime cannot handle nanosecond precision: parse w/ regex
-_RFC3339_NANOS = re.compile(
- r"""
- (?P<no_fraction>
- \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS
- )
- ( # Optional decimal part
- \. # decimal point
- (?P<nanos>\d{1,9}) # nanoseconds, maybe truncated
- )?
- Z # Zulu
-""",
- re.VERBOSE,
-)
-
-
-def utcnow():
- """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
- return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
-
-
-def to_milliseconds(value):
- """Convert a zone-aware datetime to milliseconds since the unix epoch.
-
- Args:
- value (datetime.datetime): The datetime to covert.
-
- Returns:
- int: Milliseconds since the unix epoch.
- """
- micros = to_microseconds(value)
- return micros // 1000
-
-
-def from_microseconds(value):
- """Convert timestamp in microseconds since the unix epoch to datetime.
-
- Args:
- value (float): The timestamp to convert, in microseconds.
-
- Returns:
- datetime.datetime: The datetime object equivalent to the timestamp in
- UTC.
- """
- return _UTC_EPOCH + datetime.timedelta(microseconds=value)
-
-
-def to_microseconds(value):
- """Convert a datetime to microseconds since the unix epoch.
-
- Args:
- value (datetime.datetime): The datetime to covert.
-
- Returns:
- int: Microseconds since the unix epoch.
- """
- if not value.tzinfo:
- value = value.replace(tzinfo=datetime.timezone.utc)
- # Regardless of what timezone is on the value, convert it to UTC.
- value = value.astimezone(datetime.timezone.utc)
- # Convert the datetime to a microsecond timestamp.
- return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
-
-
-def from_iso8601_date(value):
- """Convert a ISO8601 date string to a date.
-
- Args:
- value (str): The ISO8601 date string.
-
- Returns:
- datetime.date: A date equivalent to the date string.
- """
- return datetime.datetime.strptime(value, "%Y-%m-%d").date()
-
-
-def from_iso8601_time(value):
- """Convert a zoneless ISO8601 time string to a time.
-
- Args:
- value (str): The ISO8601 time string.
-
- Returns:
- datetime.time: A time equivalent to the time string.
- """
- return datetime.datetime.strptime(value, "%H:%M:%S").time()
-
-
-def from_rfc3339(value):
- """Convert an RFC3339-format timestamp to a native datetime.
-
- Supported formats include those without fractional seconds, or with
- any fraction up to nanosecond precision.
-
- .. note::
- Python datetimes do not support nanosecond precision; this function
- therefore truncates such values to microseconds.
-
- Args:
- value (str): The RFC3339 string to convert.
-
- Returns:
- datetime.datetime: The datetime object equivalent to the timestamp
- in UTC.
-
- Raises:
- ValueError: If the timestamp does not match the RFC3339
- regular expression.
- """
- with_nanos = _RFC3339_NANOS.match(value)
-
- if with_nanos is None:
- raise ValueError(
- "Timestamp: {!r}, does not match pattern: {!r}".format(
- value, _RFC3339_NANOS.pattern
- )
- )
-
- bare_seconds = datetime.datetime.strptime(
- with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
- )
- fraction = with_nanos.group("nanos")
-
- if fraction is None:
- micros = 0
- else:
- scale = 9 - len(fraction)
- nanos = int(fraction) * (10**scale)
- micros = nanos // 1000
-
- return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
-
-
-from_rfc3339_nanos = from_rfc3339 # from_rfc3339_nanos method was deprecated.
-
-
-def to_rfc3339(value, ignore_zone=True):
- """Convert a datetime to an RFC3339 timestamp string.
-
- Args:
- value (datetime.datetime):
- The datetime object to be converted to a string.
- ignore_zone (bool): If True, then the timezone (if any) of the
- datetime object is ignored and the datetime is treated as UTC.
-
- Returns:
- str: The RFC3339 formatted string representing the datetime.
- """
- if not ignore_zone and value.tzinfo is not None:
- # Convert to UTC and remove the time zone info.
- value = value.replace(tzinfo=None) - value.utcoffset()
-
- return value.strftime(_RFC3339_MICROS)
-
-
-class DatetimeWithNanoseconds(datetime.datetime):
- """Track nanosecond in addition to normal datetime attrs.
-
- Nanosecond can be passed only as a keyword argument.
- """
-
- __slots__ = ("_nanosecond",)
-
- # pylint: disable=arguments-differ
- def __new__(cls, *args, **kw):
- nanos = kw.pop("nanosecond", 0)
- if nanos > 0:
- if "microsecond" in kw:
- raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
- kw["microsecond"] = nanos // 1000
- inst = datetime.datetime.__new__(cls, *args, **kw)
- inst._nanosecond = nanos or 0
- return inst
-
- # pylint: disable=arguments-differ
-
- @property
- def nanosecond(self):
- """Read-only: nanosecond precision."""
- return self._nanosecond
-
- def rfc3339(self):
- """Return an RFC3339-compliant timestamp.
-
- Returns:
- (str): Timestamp string according to RFC3339 spec.
- """
- if self._nanosecond == 0:
- return to_rfc3339(self)
- nanos = str(self._nanosecond).rjust(9, "0").rstrip("0")
- return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
-
- @classmethod
- def from_rfc3339(cls, stamp):
- """Parse RFC3339-compliant timestamp, preserving nanoseconds.
-
- Args:
- stamp (str): RFC3339 stamp, with up to nanosecond precision
-
- Returns:
- :class:`DatetimeWithNanoseconds`:
- an instance matching the timestamp string
-
- Raises:
- ValueError: if `stamp` does not match the expected format
- """
- with_nanos = _RFC3339_NANOS.match(stamp)
- if with_nanos is None:
- raise ValueError(
- "Timestamp: {}, does not match pattern: {}".format(
- stamp, _RFC3339_NANOS.pattern
- )
- )
- bare = datetime.datetime.strptime(
- with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
- )
- fraction = with_nanos.group("nanos")
- if fraction is None:
- nanos = 0
- else:
- scale = 9 - len(fraction)
- nanos = int(fraction) * (10**scale)
- return cls(
- bare.year,
- bare.month,
- bare.day,
- bare.hour,
- bare.minute,
- bare.second,
- nanosecond=nanos,
- tzinfo=datetime.timezone.utc,
- )
-
- def timestamp_pb(self):
- """Return a timestamp message.
-
- Returns:
- (:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
- """
- inst = (
- self
- if self.tzinfo is not None
- else self.replace(tzinfo=datetime.timezone.utc)
- )
- delta = inst - _UTC_EPOCH
- seconds = int(delta.total_seconds())
- nanos = self._nanosecond or self.microsecond * 1000
- return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
-
- @classmethod
- def from_timestamp_pb(cls, stamp):
- """Parse RFC3339-compliant timestamp, preserving nanoseconds.
-
- Args:
- stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message
-
- Returns:
- :class:`DatetimeWithNanoseconds`:
- an instance matching the timestamp message
- """
- microseconds = int(stamp.seconds * 1e6)
- bare = from_microseconds(microseconds)
- return cls(
- bare.year,
- bare.month,
- bare.day,
- bare.hour,
- bare.minute,
- bare.second,
- nanosecond=stamp.nanos,
- tzinfo=datetime.timezone.utc,
- )
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
deleted file mode 100644
index e3eb696..0000000
--- a/google/api_core/exceptions.py
+++ /dev/null
@@ -1,670 +0,0 @@
-# Copyright 2014 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Exceptions raised by Google API core & clients.
-
-This module provides base classes for all errors raised by libraries based
-on :mod:`google.api_core`, including both HTTP and gRPC clients.
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import http.client
-from typing import Optional, Dict
-from typing import Union
-import warnings
-
-from google.rpc import error_details_pb2
-
-
-def _warn_could_not_import_grpcio_status():
- warnings.warn(
- "Please install grpcio-status to obtain helpful grpc error messages.",
- ImportWarning,
- ) # pragma: NO COVER
-
-
-try:
- import grpc
-
- try:
- from grpc_status import rpc_status
- except ImportError: # pragma: NO COVER
- _warn_could_not_import_grpcio_status()
- rpc_status = None
-except ImportError: # pragma: NO COVER
- grpc = None
-
-# Lookup tables for mapping exceptions from HTTP and gRPC transports.
-# Populated by _GoogleAPICallErrorMeta
-_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
-_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
-
-# Additional lookup table to map integer status codes to grpc status code
-# grpc does not currently support initializing enums from ints
-# i.e., grpc.StatusCode(5) raises an error
-_INT_TO_GRPC_CODE = {}
-if grpc is not None: # pragma: no branch
- for x in grpc.StatusCode:
- _INT_TO_GRPC_CODE[x.value[0]] = x
-
-
-class GoogleAPIError(Exception):
- """Base class for all exceptions raised by Google API Clients."""
-
- pass
-
-
-class DuplicateCredentialArgs(GoogleAPIError):
- """Raised when multiple credentials are passed."""
-
- pass
-
-
-class RetryError(GoogleAPIError):
- """Raised when a function has exhausted all of its available retries.
-
- Args:
- message (str): The exception message.
- cause (Exception): The last exception raised when retrying the
- function.
- """
-
- def __init__(self, message, cause):
- super(RetryError, self).__init__(message)
- self.message = message
- self._cause = cause
-
- @property
- def cause(self):
- """The last exception raised when retrying the function."""
- return self._cause
-
- def __str__(self):
- return "{}, last exception: {}".format(self.message, self.cause)
-
-
-class _GoogleAPICallErrorMeta(type):
- """Metaclass for registering GoogleAPICallError subclasses."""
-
- def __new__(mcs, name, bases, class_dict):
- cls = type.__new__(mcs, name, bases, class_dict)
- if cls.code is not None:
- _HTTP_CODE_TO_EXCEPTION.setdefault(cls.code, cls)
- if cls.grpc_status_code is not None:
- _GRPC_CODE_TO_EXCEPTION.setdefault(cls.grpc_status_code, cls)
- return cls
-
-
-class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta):
- """Base class for exceptions raised by calling API methods.
-
- Args:
- message (str): The exception message.
- errors (Sequence[Any]): An optional list of error details.
- details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
- response (Union[requests.Request, grpc.Call]): The response or
- gRPC call metadata.
- error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info
- (google.rpc.error_details.ErrorInfo).
- """
-
- code: Union[int, None] = None
- """Optional[int]: The HTTP status code associated with this error.
-
- This may be ``None`` if the exception does not have a direct mapping
- to an HTTP error.
-
- See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
- """
-
- grpc_status_code = None
- """Optional[grpc.StatusCode]: The gRPC status code associated with this
- error.
-
- This may be ``None`` if the exception does not match up to a gRPC error.
- """
-
- def __init__(self, message, errors=(), details=(), response=None, error_info=None):
- super(GoogleAPICallError, self).__init__(message)
- self.message = message
- """str: The exception message."""
- self._errors = errors
- self._details = details
- self._response = response
- self._error_info = error_info
-
- def __str__(self):
- error_msg = "{} {}".format(self.code, self.message)
- if self.details:
- error_msg = "{} {}".format(error_msg, self.details)
- # Note: This else condition can be removed once proposal A from
- # b/284179390 is implemented.
- else:
- if self.errors:
- errors = [
- f"{error.code}: {error.message}"
- for error in self.errors
- if hasattr(error, "code") and hasattr(error, "message")
- ]
- if errors:
- error_msg = "{} {}".format(error_msg, "\n".join(errors))
- return error_msg
-
- @property
- def reason(self):
- """The reason of the error.
-
- Reference:
- https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
-
- Returns:
- Union[str, None]: An optional string containing reason of the error.
- """
- return self._error_info.reason if self._error_info else None
-
- @property
- def domain(self):
- """The logical grouping to which the "reason" belongs.
-
- Reference:
- https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
-
- Returns:
- Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs.
- """
- return self._error_info.domain if self._error_info else None
-
- @property
- def metadata(self):
- """Additional structured details about this error.
-
- Reference:
- https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
-
- Returns:
- Union[Dict[str, str], None]: An optional object containing structured details about the error.
- """
- return self._error_info.metadata if self._error_info else None
-
- @property
- def errors(self):
- """Detailed error information.
-
- Returns:
- Sequence[Any]: A list of additional error details.
- """
- return list(self._errors)
-
- @property
- def details(self):
- """Information contained in google.rpc.status.details.
-
- Reference:
- https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto
- https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto
-
- Returns:
- Sequence[Any]: A list of structured objects from error_details.proto
- """
- return list(self._details)
-
- @property
- def response(self):
- """Optional[Union[requests.Request, grpc.Call]]: The response or
- gRPC call metadata."""
- return self._response
-
-
-class Redirection(GoogleAPICallError):
- """Base class for for all redirection (HTTP 3xx) responses."""
-
-
-class MovedPermanently(Redirection):
- """Exception mapping a ``301 Moved Permanently`` response."""
-
- code = http.client.MOVED_PERMANENTLY
-
-
-class NotModified(Redirection):
- """Exception mapping a ``304 Not Modified`` response."""
-
- code = http.client.NOT_MODIFIED
-
-
-class TemporaryRedirect(Redirection):
- """Exception mapping a ``307 Temporary Redirect`` response."""
-
- code = http.client.TEMPORARY_REDIRECT
-
-
-class ResumeIncomplete(Redirection):
- """Exception mapping a ``308 Resume Incomplete`` response.
-
- .. note:: :attr:`http.client.PERMANENT_REDIRECT` is ``308``, but Google
- APIs differ in their use of this status code.
- """
-
- code = 308
-
-
-class ClientError(GoogleAPICallError):
- """Base class for all client error (HTTP 4xx) responses."""
-
-
-class BadRequest(ClientError):
- """Exception mapping a ``400 Bad Request`` response."""
-
- code = http.client.BAD_REQUEST
-
-
-class InvalidArgument(BadRequest):
- """Exception mapping a :attr:`grpc.StatusCode.INVALID_ARGUMENT` error."""
-
- grpc_status_code = grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None
-
-
-class FailedPrecondition(BadRequest):
- """Exception mapping a :attr:`grpc.StatusCode.FAILED_PRECONDITION`
- error."""
-
- grpc_status_code = grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None
-
-
-class OutOfRange(BadRequest):
- """Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error."""
-
- grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
-
-
-class Unauthorized(ClientError):
- """Exception mapping a ``401 Unauthorized`` response."""
-
- code = http.client.UNAUTHORIZED
-
-
-class Unauthenticated(Unauthorized):
- """Exception mapping a :attr:`grpc.StatusCode.UNAUTHENTICATED` error."""
-
- grpc_status_code = grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None
-
-
-class Forbidden(ClientError):
- """Exception mapping a ``403 Forbidden`` response."""
-
- code = http.client.FORBIDDEN
-
-
-class PermissionDenied(Forbidden):
- """Exception mapping a :attr:`grpc.StatusCode.PERMISSION_DENIED` error."""
-
- grpc_status_code = grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None
-
-
-class NotFound(ClientError):
- """Exception mapping a ``404 Not Found`` response or a
- :attr:`grpc.StatusCode.NOT_FOUND` error."""
-
- code = http.client.NOT_FOUND
- grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
-
-
-class MethodNotAllowed(ClientError):
- """Exception mapping a ``405 Method Not Allowed`` response."""
-
- code = http.client.METHOD_NOT_ALLOWED
-
-
-class Conflict(ClientError):
- """Exception mapping a ``409 Conflict`` response."""
-
- code = http.client.CONFLICT
-
-
-class AlreadyExists(Conflict):
- """Exception mapping a :attr:`grpc.StatusCode.ALREADY_EXISTS` error."""
-
- grpc_status_code = grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None
-
-
-class Aborted(Conflict):
- """Exception mapping a :attr:`grpc.StatusCode.ABORTED` error."""
-
- grpc_status_code = grpc.StatusCode.ABORTED if grpc is not None else None
-
-
-class LengthRequired(ClientError):
- """Exception mapping a ``411 Length Required`` response."""
-
- code = http.client.LENGTH_REQUIRED
-
-
-class PreconditionFailed(ClientError):
- """Exception mapping a ``412 Precondition Failed`` response."""
-
- code = http.client.PRECONDITION_FAILED
-
-
-class RequestRangeNotSatisfiable(ClientError):
- """Exception mapping a ``416 Request Range Not Satisfiable`` response."""
-
- code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
-
-
-class TooManyRequests(ClientError):
- """Exception mapping a ``429 Too Many Requests`` response."""
-
- code = http.client.TOO_MANY_REQUESTS
-
-
-class ResourceExhausted(TooManyRequests):
- """Exception mapping a :attr:`grpc.StatusCode.RESOURCE_EXHAUSTED` error."""
-
- grpc_status_code = grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None
-
-
-class Cancelled(ClientError):
- """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
-
- # This maps to HTTP status code 499. See
- # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
- code = 499
- grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
-
-
-class ServerError(GoogleAPICallError):
- """Base for 5xx responses."""
-
-
-class InternalServerError(ServerError):
- """Exception mapping a ``500 Internal Server Error`` response. or a
- :attr:`grpc.StatusCode.INTERNAL` error."""
-
- code = http.client.INTERNAL_SERVER_ERROR
- grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
-
-
-class Unknown(ServerError):
- """Exception mapping a :attr:`grpc.StatusCode.UNKNOWN` error."""
-
- grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None
-
-
-class DataLoss(ServerError):
- """Exception mapping a :attr:`grpc.StatusCode.DATA_LOSS` error."""
-
- grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None
-
-
-class MethodNotImplemented(ServerError):
- """Exception mapping a ``501 Not Implemented`` response or a
- :attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
-
- code = http.client.NOT_IMPLEMENTED
- grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
-
-
-class BadGateway(ServerError):
- """Exception mapping a ``502 Bad Gateway`` response."""
-
- code = http.client.BAD_GATEWAY
-
-
-class ServiceUnavailable(ServerError):
- """Exception mapping a ``503 Service Unavailable`` response or a
- :attr:`grpc.StatusCode.UNAVAILABLE` error."""
-
- code = http.client.SERVICE_UNAVAILABLE
- grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
-
-
-class GatewayTimeout(ServerError):
- """Exception mapping a ``504 Gateway Timeout`` response."""
-
- code = http.client.GATEWAY_TIMEOUT
-
-
-class DeadlineExceeded(GatewayTimeout):
- """Exception mapping a :attr:`grpc.StatusCode.DEADLINE_EXCEEDED` error."""
-
- grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
-
-
-class AsyncRestUnsupportedParameterError(NotImplementedError):
- """Raised when an unsupported parameter is configured against async rest transport."""
-
- pass
-
-
-def exception_class_for_http_status(status_code):
- """Return the exception class for a specific HTTP status code.
-
- Args:
- status_code (int): The HTTP status code.
-
- Returns:
- :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
- """
- return _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
-
-
-def from_http_status(status_code, message, **kwargs):
- """Create a :class:`GoogleAPICallError` from an HTTP status code.
-
- Args:
- status_code (int): The HTTP status code.
- message (str): The exception message.
- kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
- constructor.
-
- Returns:
- GoogleAPICallError: An instance of the appropriate subclass of
- :class:`GoogleAPICallError`.
- """
- error_class = exception_class_for_http_status(status_code)
- error = error_class(message, **kwargs)
-
- if error.code is None:
- error.code = status_code
-
- return error
-
-
-def _format_rest_error_message(error, method, url):
- method = method.upper() if method else None
- message = "{method} {url}: {error}".format(
- method=method,
- url=url,
- error=error,
- )
- return message
-
-
-# NOTE: We're moving away from `from_http_status` because it expects an aiohttp response compared
-# to `format_http_response_error` which expects a more abstract response from google.auth and is
-# compatible with both sync and async response types.
-# TODO(https://github.com/googleapis/python-api-core/issues/691): Add type hint for response.
-def format_http_response_error(
- response, method: str, url: str, payload: Optional[Dict] = None
-):
- """Create a :class:`GoogleAPICallError` from a google auth rest response.
-
- Args:
- response Union[google.auth.transport.Response, google.auth.aio.transport.Response]: The HTTP response.
- method Optional(str): The HTTP request method.
- url Optional(str): The HTTP request url.
- payload Optional(dict): The HTTP response payload. If not passed in, it is read from response for a response type of google.auth.transport.Response.
-
- Returns:
- GoogleAPICallError: An instance of the appropriate subclass of
- :class:`GoogleAPICallError`, with the message and errors populated
- from the response.
- """
- payload = {} if not payload else payload
- error_message = payload.get("error", {}).get("message", "unknown error")
- errors = payload.get("error", {}).get("errors", ())
- # In JSON, details are already formatted in developer-friendly way.
- details = payload.get("error", {}).get("details", ())
- error_info_list = list(
- filter(
- lambda detail: detail.get("@type", "")
- == "type.googleapis.com/google.rpc.ErrorInfo",
- details,
- )
- )
- error_info = error_info_list[0] if error_info_list else None
- message = _format_rest_error_message(error_message, method, url)
-
- exception = from_http_status(
- response.status_code,
- message,
- errors=errors,
- details=details,
- response=response,
- error_info=error_info,
- )
- return exception
-
-
-def from_http_response(response):
- """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
-
- Args:
- response (requests.Response): The HTTP response.
-
- Returns:
- GoogleAPICallError: An instance of the appropriate subclass of
- :class:`GoogleAPICallError`, with the message and errors populated
- from the response.
- """
- try:
- payload = response.json()
- except ValueError:
- payload = {"error": {"message": response.text or "unknown error"}}
- return format_http_response_error(
- response, response.request.method, response.request.url, payload
- )
-
-
-def exception_class_for_grpc_status(status_code):
- """Return the exception class for a specific :class:`grpc.StatusCode`.
-
- Args:
- status_code (grpc.StatusCode): The gRPC status code.
-
- Returns:
- :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
- """
- return _GRPC_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
-
-
-def from_grpc_status(status_code, message, **kwargs):
- """Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`.
-
- Args:
- status_code (Union[grpc.StatusCode, int]): The gRPC status code.
- message (str): The exception message.
- kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
- constructor.
-
- Returns:
- GoogleAPICallError: An instance of the appropriate subclass of
- :class:`GoogleAPICallError`.
- """
-
- if isinstance(status_code, int):
- status_code = _INT_TO_GRPC_CODE.get(status_code, status_code)
-
- error_class = exception_class_for_grpc_status(status_code)
- error = error_class(message, **kwargs)
-
- if error.grpc_status_code is None:
- error.grpc_status_code = status_code
-
- return error
-
-
-def _is_informative_grpc_error(rpc_exc):
- return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
-
-
-def _parse_grpc_error_details(rpc_exc):
- if not rpc_status: # pragma: NO COVER
- _warn_could_not_import_grpcio_status()
- return [], None
- try:
- status = rpc_status.from_call(rpc_exc)
- except NotImplementedError: # workaround
- return [], None
-
- if not status:
- return [], None
-
- possible_errors = [
- error_details_pb2.BadRequest,
- error_details_pb2.PreconditionFailure,
- error_details_pb2.QuotaFailure,
- error_details_pb2.ErrorInfo,
- error_details_pb2.RetryInfo,
- error_details_pb2.ResourceInfo,
- error_details_pb2.RequestInfo,
- error_details_pb2.DebugInfo,
- error_details_pb2.Help,
- error_details_pb2.LocalizedMessage,
- ]
- error_info = None
- error_details = []
- for detail in status.details:
- matched_detail_cls = list(
- filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors)
- )
- # If nothing matched, use detail directly.
- if len(matched_detail_cls) == 0:
- info = detail
- else:
- info = matched_detail_cls[0]()
- detail.Unpack(info)
- error_details.append(info)
- if isinstance(info, error_details_pb2.ErrorInfo):
- error_info = info
- return error_details, error_info
-
-
-def from_grpc_error(rpc_exc):
- """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
-
- Args:
- rpc_exc (grpc.RpcError): The gRPC error.
-
- Returns:
- GoogleAPICallError: An instance of the appropriate subclass of
- :class:`GoogleAPICallError`.
- """
- # NOTE(lidiz) All gRPC error shares the parent class grpc.RpcError.
- # However, check for grpc.RpcError breaks backward compatibility.
- if (
- grpc is not None and isinstance(rpc_exc, grpc.Call)
- ) or _is_informative_grpc_error(rpc_exc):
- details, err_info = _parse_grpc_error_details(rpc_exc)
- return from_grpc_status(
- rpc_exc.code(),
- rpc_exc.details(),
- errors=(rpc_exc,),
- details=details,
- response=rpc_exc,
- error_info=err_info,
- )
- else:
- return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py
deleted file mode 100644
index d474632..0000000
--- a/google/api_core/extended_operation.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Futures for extended long-running operations returned from Google Cloud APIs.
-
-These futures can be used to synchronously wait for the result of a
-long-running operations using :meth:`ExtendedOperation.result`:
-
-.. code-block:: python
-
- extended_operation = my_api_client.long_running_method()
-
- extended_operation.result()
-
-Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
-
-.. code-block:: python
-
- extended_operation = my_api_client.long_running_method()
-
- def my_callback(ex_op):
- print(f"Operation {ex_op.name} completed")
-
- extended_operation.add_done_callback(my_callback)
-
-"""
-
-import threading
-
-from google.api_core import exceptions
-from google.api_core.future import polling
-
-
-class ExtendedOperation(polling.PollingFuture):
- """An ExtendedOperation future for interacting with a Google API Long-Running Operation.
-
- Args:
- extended_operation (proto.Message): The initial operation.
- refresh (Callable[[], type(extended_operation)]): A callable that returns
- the latest state of the operation.
- cancel (Callable[[], None]): A callable that tries to cancel the operation.
- polling Optional(google.api_core.retry.Retry): The configuration used
- for polling. This can be used to control how often :meth:`done`
- is polled. If the ``timeout`` argument to :meth:`result` is
- specified it will override the ``polling.timeout`` property.
- retry Optional(google.api_core.retry.Retry): DEPRECATED use ``polling``
- instead. If specified it will override ``polling`` parameter to
- maintain backward compatibility.
-
- Note: Most long-running API methods use google.api_core.operation.Operation
- This class is a wrapper for a subset of methods that use alternative
- Long-Running Operation (LRO) semantics.
-
- Note: there is not a concrete type the extended operation must be.
- It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES:
- * name: str
- * status: Union[str, bool, enum.Enum]
- * error_code: int
- * error_message: str
- """
-
- def __init__(
- self,
- extended_operation,
- refresh,
- cancel,
- polling=polling.DEFAULT_POLLING,
- **kwargs,
- ):
- super().__init__(polling=polling, **kwargs)
- self._extended_operation = extended_operation
- self._refresh = refresh
- self._cancel = cancel
- # Note: the extended operation does not give a good way to indicate cancellation.
- # We make do with manually tracking cancellation and checking for doneness.
- self._cancelled = False
- self._completion_lock = threading.Lock()
- # Invoke in case the operation came back already complete.
- self._handle_refreshed_operation()
-
- # Note: the following four properties MUST be overridden in a subclass
- # if, and only if, the fields in the corresponding extended operation message
- # have different names.
- #
- # E.g. we have an extended operation class that looks like
- #
- # class MyOperation(proto.Message):
- # moniker = proto.Field(proto.STRING, number=1)
- # status_msg = proto.Field(proto.STRING, number=2)
- # optional http_error_code = proto.Field(proto.INT32, number=3)
- # optional http_error_msg = proto.Field(proto.STRING, number=4)
- #
- # the ExtendedOperation subclass would provide property overrides that map
- # to these (poorly named) fields.
- @property
- def name(self):
- return self._extended_operation.name
-
- @property
- def status(self):
- return self._extended_operation.status
-
- @property
- def error_code(self):
- return self._extended_operation.error_code
-
- @property
- def error_message(self):
- return self._extended_operation.error_message
-
- def __getattr__(self, name):
- return getattr(self._extended_operation, name)
-
- def done(self, retry=None):
- self._refresh_and_update(retry)
- return self._extended_operation.done
-
- def cancel(self):
- if self.done():
- return False
-
- self._cancel()
- self._cancelled = True
- return True
-
- def cancelled(self):
- # TODO(dovs): there is not currently a good way to determine whether the
- # operation has been cancelled.
- # The best we can do is manually keep track of cancellation
- # and check for doneness.
- if not self._cancelled:
- return False
-
- self._refresh_and_update()
- return self._extended_operation.done
-
- def _refresh_and_update(self, retry=None):
- if not self._extended_operation.done:
- self._extended_operation = (
- self._refresh(retry=retry) if retry else self._refresh()
- )
- self._handle_refreshed_operation()
-
- def _handle_refreshed_operation(self):
- with self._completion_lock:
- if not self._extended_operation.done:
- return
-
- if self.error_code and self.error_message:
- # Note: `errors` can be removed once proposal A from
- # b/284179390 is implemented.
- errors = []
- if hasattr(self, "error") and hasattr(self.error, "errors"):
- errors = self.error.errors
- exception = exceptions.from_http_status(
- status_code=self.error_code,
- message=self.error_message,
- response=self._extended_operation,
- errors=errors,
- )
- self.set_exception(exception)
- elif self.error_code or self.error_message:
- exception = exceptions.GoogleAPICallError(
- f"Unexpected error {self.error_code}: {self.error_message}"
- )
- self.set_exception(exception)
- else:
- # Extended operations have no payload.
- self.set_result(None)
-
- @classmethod
- def make(cls, refresh, cancel, extended_operation, **kwargs):
- """
- Return an instantiated ExtendedOperation (or child) that wraps
- * a refresh callable
- * a cancel callable (can be a no-op)
- * an initial result
-
- .. note::
- It is the caller's responsibility to set up refresh and cancel
- with their correct request argument.
- The reason for this is that the services that use Extended Operations
- have rpcs that look something like the following:
-
- // service.proto
- service MyLongService {
- rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) {
- option (google.cloud.operation_service) = "CustomOperationService";
- }
- }
-
- service CustomOperationService {
- rpc Get(GetOperationRequest) returns (ExtendedOperation) {
- option (google.cloud.operation_polling_method) = true;
- }
- }
-
- Any info needed for the poll, e.g. a name, path params, etc.
- is held in the request, which the initial client method is in a much
- better position to make made because the caller made the initial request.
-
- TL;DR: the caller sets up closures for refresh and cancel that carry
- the properly configured requests.
-
- Args:
- refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that
- returns the latest state of the operation.
- cancel (Callable[][Any]): A callable that tries to cancel the operation
- on a best effort basis.
- extended_operation (Any): The initial response of the long running method.
- See the docstring for ExtendedOperation.__init__ for requirements on
- the type and fields of extended_operation
- """
- return cls(extended_operation, refresh, cancel, **kwargs)
diff --git a/google/api_core/future/__init__.py b/google/api_core/future/__init__.py
deleted file mode 100644
index 3768b2c..0000000
--- a/google/api_core/future/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Futures for dealing with asynchronous operations."""
-
-from google.api_core.future.base import Future
-
-__all__ = ["Future"]
diff --git a/google/api_core/future/_helpers.py b/google/api_core/future/_helpers.py
deleted file mode 100644
index 9e88ca9..0000000
--- a/google/api_core/future/_helpers.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Private helpers for futures."""
-
-import logging
-import threading
-
-
-_LOGGER = logging.getLogger(__name__)
-
-
-def start_daemon_thread(*args, **kwargs):
- """Starts a thread and marks it as a daemon thread."""
- thread = threading.Thread(*args, **kwargs)
- thread.daemon = True
- thread.start()
- return thread
-
-
-def safe_invoke_callback(callback, *args, **kwargs):
- """Invoke a callback, swallowing and logging any exceptions."""
- # pylint: disable=bare-except
- # We intentionally want to swallow all exceptions.
- try:
- return callback(*args, **kwargs)
- except Exception:
- _LOGGER.exception("Error while executing Future callback.")
diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py
deleted file mode 100644
index 325ee9c..0000000
--- a/google/api_core/future/async_future.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright 2020, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""AsyncIO implementation of the abstract base Future class."""
-
-import asyncio
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.api_core import retry_async
-from google.api_core.future import base
-
-
-class _OperationNotComplete(Exception):
- """Private exception used for polling via retry."""
-
- pass
-
-
-RETRY_PREDICATE = retry.if_exception_type(
- _OperationNotComplete,
- exceptions.TooManyRequests,
- exceptions.InternalServerError,
- exceptions.BadGateway,
-)
-DEFAULT_RETRY = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
-
-
-class AsyncFuture(base.Future):
- """A Future that polls peer service to self-update.
-
- The :meth:`done` method should be implemented by subclasses. The polling
- behavior will repeatedly call ``done`` until it returns True.
-
- .. note::
-
- Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
-
- Args:
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
- """
-
- def __init__(self, retry=DEFAULT_RETRY):
- super().__init__()
- self._retry = retry
- self._future = asyncio.get_event_loop().create_future()
- self._background_task = None
-
- async def done(self, retry=DEFAULT_RETRY):
- """Checks to see if the operation is complete.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-
- Returns:
- bool: True if the operation is complete, False otherwise.
- """
- # pylint: disable=redundant-returns-doc, missing-raises-doc
- raise NotImplementedError()
-
- async def _done_or_raise(self):
- """Check if the future is done and raise if it's not."""
- result = await self.done()
- if not result:
- raise _OperationNotComplete()
-
- async def running(self):
- """True if the operation is currently running."""
- result = await self.done()
- return not result
-
- async def _blocking_poll(self, timeout=None):
- """Poll and await for the Future to be resolved.
-
- Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
- """
- if self._future.done():
- return
-
- retry_ = self._retry.with_timeout(timeout)
-
- try:
- await retry_(self._done_or_raise)()
- except exceptions.RetryError:
- raise asyncio.TimeoutError(
- "Operation did not complete within the designated " "timeout."
- )
-
- async def result(self, timeout=None):
- """Get the result of the operation.
-
- Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
-
- Returns:
- google.protobuf.Message: The Operation's result.
-
- Raises:
- google.api_core.GoogleAPICallError: If the operation errors or if
- the timeout is reached before the operation completes.
- """
- await self._blocking_poll(timeout=timeout)
- return self._future.result()
-
- async def exception(self, timeout=None):
- """Get the exception from the operation.
-
- Args:
- timeout (int): How long to wait for the operation to complete.
- If None, wait indefinitely.
-
- Returns:
- Optional[google.api_core.GoogleAPICallError]: The operation's
- error.
- """
- await self._blocking_poll(timeout=timeout)
- return self._future.exception()
-
- def add_done_callback(self, fn):
- """Add a callback to be executed when the operation is complete.
-
- If the operation is completed, the callback will be scheduled onto the
- event loop. Otherwise, the callback will be stored and invoked when the
- future is done.
-
- Args:
- fn (Callable[Future]): The callback to execute when the operation
- is complete.
- """
- if self._background_task is None:
- self._background_task = asyncio.get_event_loop().create_task(
- self._blocking_poll()
- )
- self._future.add_done_callback(fn)
-
- def set_result(self, result):
- """Set the Future's result."""
- self._future.set_result(result)
-
- def set_exception(self, exception):
- """Set the Future's exception."""
- self._future.set_exception(exception)
diff --git a/google/api_core/future/base.py b/google/api_core/future/base.py
deleted file mode 100644
index f300586..0000000
--- a/google/api_core/future/base.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Abstract and helper bases for Future implementations."""
-
-import abc
-
-
-class Future(object, metaclass=abc.ABCMeta):
- # pylint: disable=missing-docstring
- # We inherit the interfaces here from concurrent.futures.
-
- """Future interface.
-
- This interface is based on :class:`concurrent.futures.Future`.
- """
-
- @abc.abstractmethod
- def cancel(self):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def cancelled(self):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def running(self):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def done(self):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def result(self, timeout=None):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def exception(self, timeout=None):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_done_callback(self, fn):
- # pylint: disable=invalid-name
- raise NotImplementedError()
-
- @abc.abstractmethod
- def set_result(self, result):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def set_exception(self, exception):
- raise NotImplementedError()
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
deleted file mode 100644
index f1e2a18..0000000
--- a/google/api_core/future/polling.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Abstract and helper bases for Future implementations."""
-
-import abc
-import concurrent.futures
-
-from google.api_core import exceptions
-from google.api_core import retry as retries
-from google.api_core.future import _helpers
-from google.api_core.future import base
-
-
-class _OperationNotComplete(Exception):
- """Private exception used for polling via retry."""
-
- pass
-
-
-# DEPRECATED as it conflates RPC retry and polling concepts into one.
-# Use POLLING_PREDICATE instead to configure polling.
-RETRY_PREDICATE = retries.if_exception_type(
- _OperationNotComplete,
- exceptions.TooManyRequests,
- exceptions.InternalServerError,
- exceptions.BadGateway,
- exceptions.ServiceUnavailable,
-)
-
-# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct
-# Retry object using its default values as a baseline for any custom retry logic
-# (not to be confused with polling logic).
-DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE)
-
-# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete.
-# Any RPC-specific errors (like ServiceUnavailable) will be handled
-# by retry logic (not to be confused with polling logic) which is triggered for
-# every polling RPC independently of polling logic but within its context.
-POLLING_PREDICATE = retries.if_exception_type(
- _OperationNotComplete,
-)
-
-# Default polling configuration
-DEFAULT_POLLING = retries.Retry(
- predicate=POLLING_PREDICATE,
- initial=1.0, # seconds
- maximum=20.0, # seconds
- multiplier=1.5,
- timeout=900, # seconds
-)
-
-
-class PollingFuture(base.Future):
- """A Future that needs to poll some service to check its status.
-
- The :meth:`done` method should be implemented by subclasses. The polling
- behavior will repeatedly call ``done`` until it returns True.
-
- The actual polling logic is encapsulated in :meth:`result` method. See
- documentation for that method for details on how polling works.
-
- .. note::
-
- Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
-
- Args:
- polling (google.api_core.retry.Retry): The configuration used for polling.
- This parameter controls how often :meth:`done` is polled. If the
- ``timeout`` argument is specified in :meth:`result` method it will
- override the ``polling.timeout`` property.
- retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead.
- If set, it will override ``polling`` parameter for backward
- compatibility.
- """
-
- _DEFAULT_VALUE = object()
-
- def __init__(self, polling=DEFAULT_POLLING, **kwargs):
- super(PollingFuture, self).__init__()
- self._polling = kwargs.get("retry", polling)
- self._result = None
- self._exception = None
- self._result_set = False
- """bool: Set to True when the result has been set via set_result or
- set_exception."""
- self._polling_thread = None
- self._done_callbacks = []
-
- @abc.abstractmethod
- def done(self, retry=None):
- """Checks to see if the operation is complete.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the
- polling RPC (to not be confused with polling configuration. See
- the documentation for :meth:`result` for details).
-
- Returns:
- bool: True if the operation is complete, False otherwise.
- """
- # pylint: disable=redundant-returns-doc, missing-raises-doc
- raise NotImplementedError()
-
- def _done_or_raise(self, retry=None):
- """Check if the future is done and raise if it's not."""
- if not self.done(retry=retry):
- raise _OperationNotComplete()
-
- def running(self):
- """True if the operation is currently running."""
- return not self.done()
-
- def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
- """Poll and wait for the Future to be resolved."""
-
- if self._result_set:
- return
-
- polling = polling or self._polling
- if timeout is not PollingFuture._DEFAULT_VALUE:
- polling = polling.with_timeout(timeout)
-
- try:
- polling(self._done_or_raise)(retry=retry)
- except exceptions.RetryError:
- raise concurrent.futures.TimeoutError(
- f"Operation did not complete within the designated timeout of "
- f"{polling.timeout} seconds."
- )
-
- def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
- """Get the result of the operation.
-
- This method will poll for operation status periodically, blocking if
- necessary. If you just want to make sure that this method does not block
- for more than X seconds and you do not care about the nitty-gritty of
- how this method operates, just call it with ``result(timeout=X)``. The
- other parameters are for advanced use only.
-
- Every call to this method is controlled by the following three
- parameters, each of which has a specific, distinct role, even though all three
- may look very similar: ``timeout``, ``retry`` and ``polling``. In most
- cases users do not need to specify any custom values for any of these
- parameters and may simply rely on default ones instead.
-
- If you choose to specify custom parameters, please make sure you've
- read the documentation below carefully.
-
- First, please check :class:`google.api_core.retry.Retry`
- class documentation for the proper definition of timeout and deadline
- terms and for the definition the three different types of timeouts.
- This class operates in terms of Retry Timeout and Polling Timeout. It
- does not let customizing RPC timeout and the user is expected to rely on
- default behavior for it.
-
- The roles of each argument of this method are as follows:
-
- ``timeout`` (int): (Optional) The Polling Timeout as defined in
- :class:`google.api_core.retry.Retry`. If the operation does not complete
- within this timeout an exception will be thrown. This parameter affects
- neither Retry Timeout nor RPC Timeout.
-
- ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the
- polling RPC. The ``retry.timeout`` property of this parameter is the
- Retry Timeout as defined in :class:`google.api_core.retry.Retry`.
- This parameter defines ONLY how the polling RPC call is retried
- (i.e. what to do if the RPC we used for polling returned an error). It
- does NOT define how the polling is done (i.e. how frequently and for
- how long to call the polling RPC); use the ``polling`` parameter for that.
- If a polling RPC throws and error and retrying it fails, the whole
- future fails with the corresponding exception. If you want to tune which
- server response error codes are not fatal for operation polling, use this
- parameter to control that (``retry.predicate`` in particular).
-
- ``polling`` (google.api_core.retry.Retry): (Optional) How often and
- for how long to call the polling RPC periodically (i.e. what to do if
- a polling rpc returned successfully but its returned result indicates
- that the long running operation is not completed yet, so we need to
- check it again at some point in future). This parameter does NOT define
- how to retry each individual polling RPC in case of an error; use the
- ``retry`` parameter for that. The ``polling.timeout`` of this parameter
- is Polling Timeout as defined in as defined in
- :class:`google.api_core.retry.Retry`.
-
- For each of the arguments, there are also default values in place, which
- will be used if a user does not specify their own. The default values
- for the three parameters are not to be confused with the default values
- for the corresponding arguments in this method (those serve as "not set"
- markers for the resolution logic).
-
- If ``timeout`` is provided (i.e.``timeout is not _DEFAULT VALUE``; note
- the ``None`` value means "infinite timeout"), it will be used to control
- the actual Polling Timeout. Otherwise, the ``polling.timeout`` value
- will be used instead (see below for how the ``polling`` config itself
- gets resolved). In other words, this parameter effectively overrides
- the ``polling.timeout`` value if specified. This is so to preserve
- backward compatibility.
-
- If ``retry`` is provided (i.e. ``retry is not None``) it will be used to
- control retry behavior for the polling RPC and the ``retry.timeout``
- will determine the Retry Timeout. If not provided, the
- polling RPC will be called with whichever default retry config was
- specified for the polling RPC at the moment of the construction of the
- polling RPC's client. For example, if the polling RPC is
- ``operations_client.get_operation()``, the ``retry`` parameter will be
- controlling its retry behavior (not polling behavior) and, if not
- specified, that specific method (``operations_client.get_operation()``)
- will be retried according to the default retry config provided during
- creation of ``operations_client`` client instead. This argument exists
- mainly for backward compatibility; users are very unlikely to ever need
- to set this parameter explicitly.
-
- If ``polling`` is provided (i.e. ``polling is not None``), it will be used
- to control the overall polling behavior and ``polling.timeout`` will
- control Polling Timeout unless it is overridden by ``timeout`` parameter
- as described above. If not provided, the``polling`` parameter specified
- during construction of this future (the ``polling`` argument in the
- constructor) will be used instead. Note: since the ``timeout`` argument may
- override ``polling.timeout`` value, this parameter should be viewed as
- coupled with the ``timeout`` parameter as described above.
-
- Args:
- timeout (int): (Optional) How long (in seconds) to wait for the
- operation to complete. If None, wait indefinitely.
- retry (google.api_core.retry.Retry): (Optional) How to retry the
- polling RPC. This defines ONLY how the polling RPC call is
- retried (i.e. what to do if the RPC we used for polling returned
- an error). It does NOT define how the polling is done (i.e. how
- frequently and for how long to call the polling RPC).
- polling (google.api_core.retry.Retry): (Optional) How often and
- for how long to call polling RPC periodically. This parameter
- does NOT define how to retry each individual polling RPC call
- (use the ``retry`` parameter for that).
-
- Returns:
- google.protobuf.Message: The Operation's result.
-
- Raises:
- google.api_core.GoogleAPICallError: If the operation errors or if
- the timeout is reached before the operation completes.
- """
-
- self._blocking_poll(timeout=timeout, retry=retry, polling=polling)
-
- if self._exception is not None:
- # pylint: disable=raising-bad-type
- # Pylint doesn't recognize that this is valid in this case.
- raise self._exception
-
- return self._result
-
- def exception(self, timeout=_DEFAULT_VALUE):
- """Get the exception from the operation, blocking if necessary.
-
- See the documentation for the :meth:`result` method for details on how
- this method operates, as both ``result`` and this method rely on the
- exact same polling logic. The only difference is that this method does
- not accept ``retry`` and ``polling`` arguments but relies on the default ones
- instead.
-
- Args:
- timeout (int): How long to wait for the operation to complete.
- If None, wait indefinitely.
-
- Returns:
- Optional[google.api_core.GoogleAPICallError]: The operation's
- error.
- """
- self._blocking_poll(timeout=timeout)
- return self._exception
-
- def add_done_callback(self, fn):
- """Add a callback to be executed when the operation is complete.
-
- If the operation is not already complete, this will start a helper
- thread to poll for the status of the operation in the background.
-
- Args:
- fn (Callable[Future]): The callback to execute when the operation
- is complete.
- """
- if self._result_set:
- _helpers.safe_invoke_callback(fn, self)
- return
-
- self._done_callbacks.append(fn)
-
- if self._polling_thread is None:
- # The polling thread will exit on its own as soon as the operation
- # is done.
- self._polling_thread = _helpers.start_daemon_thread(
- target=self._blocking_poll
- )
-
- def _invoke_callbacks(self, *args, **kwargs):
- """Invoke all done callbacks."""
- for callback in self._done_callbacks:
- _helpers.safe_invoke_callback(callback, *args, **kwargs)
-
- def set_result(self, result):
- """Set the Future's result."""
- self._result = result
- self._result_set = True
- self._invoke_callbacks(self)
-
- def set_exception(self, exception):
- """Set the Future's exception."""
- self._exception = exception
- self._result_set = True
- self._invoke_callbacks(self)
diff --git a/google/api_core/gapic_v1/__init__.py b/google/api_core/gapic_v1/__init__.py
deleted file mode 100644
index e5b7ad3..0000000
--- a/google/api_core/gapic_v1/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from google.api_core.gapic_v1 import client_info
-from google.api_core.gapic_v1 import config
-from google.api_core.gapic_v1 import config_async
-from google.api_core.gapic_v1 import method
-from google.api_core.gapic_v1 import method_async
-from google.api_core.gapic_v1 import routing_header
-
-__all__ = [
- "client_info",
- "config",
- "config_async",
- "method",
- "method_async",
- "routing_header",
-]
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
deleted file mode 100644
index 4b3b564..0000000
--- a/google/api_core/gapic_v1/client_info.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for providing client information.
-
-Client information is used to send information about the calling client,
-such as the library and Python version, to API services.
-"""
-
-from google.api_core import client_info
-
-
-METRICS_METADATA_KEY = "x-goog-api-client"
-
-
-class ClientInfo(client_info.ClientInfo):
- """Client information used to generate a user-agent for API calls.
-
- This user-agent information is sent along with API calls to allow the
- receiving service to do analytics on which versions of Python and Google
- libraries are being used.
-
- Args:
- python_version (str): The Python interpreter version, for example,
- ``'3.9.6'``.
- grpc_version (Optional[str]): The gRPC library version.
- api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The version of gapic-generated client
- library, if the library was generated by gapic.
- client_library_version (Optional[str]): The version of the client
- library, generally used if the client library was not generated
- by gapic or if additional functionality was built on top of
- a gapic client library.
- user_agent (Optional[str]): Prefix to the user agent header. This is
- used to supply information such as application name or partner tool.
- Recommended format: ``application-or-tool-ID/major.minor.version``.
- rest_version (Optional[str]): A string with labeled versions of the
- dependencies used for REST transport.
- protobuf_runtime_version (Optional[str]): The protobuf runtime version.
- """
-
- def to_grpc_metadata(self):
- """Returns the gRPC metadata for this client info."""
- return (METRICS_METADATA_KEY, self.to_user_agent())
-
-
-DEFAULT_CLIENT_INFO = ClientInfo()
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
deleted file mode 100644
index 36b50d9..0000000
--- a/google/api_core/gapic_v1/config.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for loading gapic configuration data.
-
-The Google API generator creates supplementary configuration for each RPC
-method to tell the client library how to deal with retries and timeouts.
-"""
-
-import collections
-
-import grpc
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.api_core import timeout
-
-
-_MILLIS_PER_SECOND = 1000.0
-
-
-def _exception_class_for_grpc_status_name(name):
- """Returns the Google API exception class for a gRPC error code name.
-
- DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method
- directly instead.
-
- Args:
- name (str): The name of the gRPC status code, for example,
- ``UNAVAILABLE``.
-
- Returns:
- :func:`type`: The appropriate subclass of
- :class:`google.api_core.exceptions.GoogleAPICallError`.
- """
- return exceptions.exception_class_for_grpc_status(getattr(grpc.StatusCode, name))
-
-
-def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
- """Creates a Retry object given a gapic retry configuration.
-
- DEPRECATED: instantiate retry and timeout classes directly instead.
-
- Args:
- retry_params (dict): The retry parameter values, for example::
-
- {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 2.5,
- "max_retry_delay_millis": 120000,
- "initial_rpc_timeout_millis": 120000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 120000,
- "total_timeout_millis": 600000
- }
-
- retry_codes (sequence[str]): The list of retryable gRPC error code
- names.
-
- Returns:
- google.api_core.retry.Retry: The default retry object for the method.
- """
- exception_classes = [
- _exception_class_for_grpc_status_name(code) for code in retry_codes
- ]
- return retry_impl(
- retry.if_exception_type(*exception_classes),
- initial=(retry_params["initial_retry_delay_millis"] / _MILLIS_PER_SECOND),
- maximum=(retry_params["max_retry_delay_millis"] / _MILLIS_PER_SECOND),
- multiplier=retry_params["retry_delay_multiplier"],
- deadline=retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND,
- )
-
-
-def _timeout_from_retry_config(retry_params):
- """Creates a ExponentialTimeout object given a gapic retry configuration.
-
- DEPRECATED: instantiate retry and timeout classes directly instead.
-
- Args:
- retry_params (dict): The retry parameter values, for example::
-
- {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 2.5,
- "max_retry_delay_millis": 120000,
- "initial_rpc_timeout_millis": 120000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 120000,
- "total_timeout_millis": 600000
- }
-
- Returns:
- google.api_core.retry.ExponentialTimeout: The default time object for
- the method.
- """
- return timeout.ExponentialTimeout(
- initial=(retry_params["initial_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
- maximum=(retry_params["max_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
- multiplier=retry_params["rpc_timeout_multiplier"],
- deadline=(retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND),
- )
-
-
-MethodConfig = collections.namedtuple("MethodConfig", ["retry", "timeout"])
-
-
-def parse_method_configs(interface_config, retry_impl=retry.Retry):
- """Creates default retry and timeout objects for each method in a gapic
- interface config.
-
- DEPRECATED: instantiate retry and timeout classes directly instead.
-
- Args:
- interface_config (Mapping): The interface config section of the full
- gapic library config. For example, If the full configuration has
- an interface named ``google.example.v1.ExampleService`` you would
- pass in just that interface's configuration, for example
- ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
- retry_impl (Callable): The constructor that creates a retry decorator
- that will be applied to the method based on method configs.
-
- Returns:
- Mapping[str, MethodConfig]: A mapping of RPC method names to their
- configuration.
- """
- # Grab all the retry codes
- retry_codes_map = {
- name: retry_codes
- for name, retry_codes in interface_config.get("retry_codes", {}).items()
- }
-
- # Grab all of the retry params
- retry_params_map = {
- name: retry_params
- for name, retry_params in interface_config.get("retry_params", {}).items()
- }
-
- # Iterate through all the API methods and create a flat MethodConfig
- # instance for each one.
- method_configs = {}
-
- for method_name, method_params in interface_config.get("methods", {}).items():
- retry_params_name = method_params.get("retry_params_name")
-
- if retry_params_name is not None:
- retry_params = retry_params_map[retry_params_name]
- retry_ = _retry_from_retry_config(
- retry_params,
- retry_codes_map[method_params["retry_codes_name"]],
- retry_impl,
- )
- timeout_ = _timeout_from_retry_config(retry_params)
-
- # No retry config, so this is a non-retryable method.
- else:
- retry_ = None
- timeout_ = timeout.ConstantTimeout(
- method_params["timeout_millis"] / _MILLIS_PER_SECOND
- )
-
- method_configs[method_name] = MethodConfig(retry=retry_, timeout=timeout_)
-
- return method_configs
diff --git a/google/api_core/gapic_v1/config_async.py b/google/api_core/gapic_v1/config_async.py
deleted file mode 100644
index 13d6a48..0000000
--- a/google/api_core/gapic_v1/config_async.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""AsyncIO helpers for loading gapic configuration data.
-
-The Google API generator creates supplementary configuration for each RPC
-method to tell the client library how to deal with retries and timeouts.
-"""
-
-from google.api_core import retry_async
-from google.api_core.gapic_v1 import config
-from google.api_core.gapic_v1.config import MethodConfig # noqa: F401
-
-
-def parse_method_configs(interface_config):
- """Creates default retry and timeout objects for each method in a gapic
- interface config with AsyncIO semantics.
-
- Args:
- interface_config (Mapping): The interface config section of the full
- gapic library config. For example, If the full configuration has
- an interface named ``google.example.v1.ExampleService`` you would
- pass in just that interface's configuration, for example
- ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
-
- Returns:
- Mapping[str, MethodConfig]: A mapping of RPC method names to their
- configuration.
- """
- return config.parse_method_configs(
- interface_config, retry_impl=retry_async.AsyncRetry
- )
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
deleted file mode 100644
index b4481ca..0000000
--- a/google/api_core/gapic_v1/method.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for wrapping low-level gRPC methods with common functionality.
-
-This is used by gapic clients to provide common error mapping, retry, timeout,
-compression, pagination, and long-running operations to gRPC methods.
-"""
-
-import enum
-import functools
-
-from google.api_core import grpc_helpers
-from google.api_core.gapic_v1 import client_info
-from google.api_core.timeout import TimeToDeadlineTimeout
-
-USE_DEFAULT_METADATA = object()
-
-
-class _MethodDefault(enum.Enum):
- # Uses enum so that pytype/mypy knows that this is the only possible value.
- # https://stackoverflow.com/a/60605919/101923
- _DEFAULT_VALUE = object()
-
-
-DEFAULT = _MethodDefault._DEFAULT_VALUE
-"""Sentinel value indicating that a retry, timeout, or compression argument was unspecified,
-so the default should be used."""
-
-
-def _is_not_none_or_false(value):
- return value is not None and value is not False
-
-
-def _apply_decorators(func, decorators):
- """Apply a list of decorators to a given function.
-
- ``decorators`` may contain items that are ``None`` or ``False`` which will
- be ignored.
- """
- filtered_decorators = filter(_is_not_none_or_false, reversed(decorators))
-
- for decorator in filtered_decorators:
- func = decorator(func)
-
- return func
-
-
-class _GapicCallable(object):
- """Callable that applies retry, timeout, and metadata logic.
-
- Args:
- target (Callable): The low-level RPC method.
- retry (google.api_core.retry.Retry): The default retry for the
- callable. If ``None``, this callable will not retry by default
- timeout (google.api_core.timeout.Timeout): The default timeout for the
- callable (i.e. duration of time within which an RPC must terminate
- after its start, not to be confused with deadline). If ``None``,
- this callable will not specify a timeout argument to the low-level
- RPC method.
- compression (grpc.Compression): The default compression for the callable.
- If ``None``, this callable will not specify a compression argument
- to the low-level RPC method.
- metadata (Sequence[Tuple[str, str]]): Additional metadata that is
- provided to the RPC method on every invocation. This is merged with
- any metadata specified during invocation. If ``None``, no
- additional metadata will be passed to the RPC method.
- """
-
- def __init__(
- self,
- target,
- retry,
- timeout,
- compression,
- metadata=None,
- ):
- self._target = target
- self._retry = retry
- self._timeout = timeout
- self._compression = compression
- self._metadata = metadata
-
- def __call__(
- self, *args, timeout=DEFAULT, retry=DEFAULT, compression=DEFAULT, **kwargs
- ):
- """Invoke the low-level RPC with retry, timeout, compression, and metadata."""
-
- if retry is DEFAULT:
- retry = self._retry
-
- if timeout is DEFAULT:
- timeout = self._timeout
-
- if compression is DEFAULT:
- compression = self._compression
-
- if isinstance(timeout, (int, float)):
- timeout = TimeToDeadlineTimeout(timeout=timeout)
-
- # Apply all applicable decorators.
- wrapped_func = _apply_decorators(self._target, [retry, timeout])
-
- # Add the user agent metadata to the call.
- if self._metadata is not None:
- metadata = kwargs.get("metadata", [])
- # Due to the nature of invocation, None should be treated the same
- # as not specified.
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- metadata.extend(self._metadata)
- kwargs["metadata"] = metadata
- if self._compression is not None:
- kwargs["compression"] = compression
-
- return wrapped_func(*args, **kwargs)
-
-
-def wrap_method(
- func,
- default_retry=None,
- default_timeout=None,
- default_compression=None,
- client_info=client_info.DEFAULT_CLIENT_INFO,
- *,
- with_call=False,
-):
- """Wrap an RPC method with common behavior.
-
- This applies common error wrapping, retry, timeout, and compression behavior to a function.
- The wrapped function will take optional ``retry``, ``timeout``, and ``compression``
- arguments.
-
- For example::
-
- import google.api_core.gapic_v1.method
- from google.api_core import retry
- from google.api_core import timeout
- from grpc import Compression
-
- # The original RPC method.
- def get_topic(name, timeout=None):
- request = publisher_v2.GetTopicRequest(name=name)
- return publisher_stub.GetTopic(request, timeout=timeout)
-
- default_retry = retry.Retry(deadline=60)
- default_timeout = timeout.Timeout(deadline=60)
- default_compression = Compression.NoCompression
- wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
- get_topic, default_retry)
-
- # Execute get_topic with default retry and timeout:
- response = wrapped_get_topic()
-
- # Execute get_topic without doing any retying but with the default
- # timeout:
- response = wrapped_get_topic(retry=None)
-
- # Execute get_topic but only retry on 5xx errors:
- my_retry = retry.Retry(retry.if_exception_type(
- exceptions.InternalServerError))
- response = wrapped_get_topic(retry=my_retry)
-
- The way this works is by late-wrapping the given function with the retry
- and timeout decorators. Essentially, when ``wrapped_get_topic()`` is
- called:
-
- * ``get_topic()`` is first wrapped with the ``timeout`` into
- ``get_topic_with_timeout``.
- * ``get_topic_with_timeout`` is wrapped with the ``retry`` into
- ``get_topic_with_timeout_and_retry()``.
- * The final ``get_topic_with_timeout_and_retry`` is called passing through
- the ``args`` and ``kwargs``.
-
- The callstack is therefore::
-
- method.__call__() ->
- Retry.__call__() ->
- Timeout.__call__() ->
- wrap_errors() ->
- get_topic()
-
- Note that if ``timeout`` or ``retry`` is ``None``, then they are not
- applied to the function. For example,
- ``wrapped_get_topic(timeout=None, retry=None)`` is more or less
- equivalent to just calling ``get_topic`` but with error re-mapping.
-
- Args:
- func (Callable[Any]): The function to wrap. It should accept an
- optional ``timeout`` argument. If ``metadata`` is not ``None``, it
- should accept a ``metadata`` argument.
- default_retry (Optional[google.api_core.Retry]): The default retry
- strategy. If ``None``, the method will not retry by default.
- default_timeout (Optional[google.api_core.Timeout]): The default
- timeout strategy. Can also be specified as an int or float. If
- ``None``, the method will not have timeout specified by default.
- default_compression (Optional[grpc.Compression]): The default
- grpc.Compression. If ``None``, the method will not have
- compression specified by default.
- client_info
- (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
- Client information used to create a user-agent string that's
- passed as gRPC metadata to the method. If unspecified, then
- a sane default will be used. If ``None``, then no user agent
- metadata will be provided to the RPC method.
- with_call (bool): If True, wrapped grpc.UnaryUnaryMulticallables will
- return a tuple of (response, grpc.Call) instead of just the response.
- This is useful for extracting trailing metadata from unary calls.
- Defaults to False.
-
- Returns:
- Callable: A new callable that takes optional ``retry``, ``timeout``,
- and ``compression``
- arguments and applies the common error mapping, retry, timeout, compression,
- and metadata behavior to the low-level RPC method.
- """
- if with_call:
- try:
- func = func.with_call
- except AttributeError as exc:
- raise ValueError(
- "with_call=True is only supported for unary calls."
- ) from exc
- func = grpc_helpers.wrap_errors(func)
- if client_info is not None:
- user_agent_metadata = [client_info.to_grpc_metadata()]
- else:
- user_agent_metadata = None
-
- return functools.wraps(func)(
- _GapicCallable(
- func,
- default_retry,
- default_timeout,
- default_compression,
- metadata=user_agent_metadata,
- )
- )
diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py
deleted file mode 100644
index c0f38c0..0000000
--- a/google/api_core/gapic_v1/method_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""AsyncIO helpers for wrapping gRPC methods with common functionality.
-
-This is used by gapic clients to provide common error mapping, retry, timeout,
-compression, pagination, and long-running operations to gRPC methods.
-"""
-
-import functools
-
-from google.api_core import grpc_helpers_async
-from google.api_core.gapic_v1 import client_info
-from google.api_core.gapic_v1.method import _GapicCallable
-from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
-from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401
-
-_DEFAULT_ASYNC_TRANSPORT_KIND = "grpc_asyncio"
-
-
-def wrap_method(
- func,
- default_retry=None,
- default_timeout=None,
- default_compression=None,
- client_info=client_info.DEFAULT_CLIENT_INFO,
- kind=_DEFAULT_ASYNC_TRANSPORT_KIND,
-):
- """Wrap an async RPC method with common behavior.
-
- Returns:
- Callable: A new callable that takes optional ``retry``, ``timeout``,
- and ``compression`` arguments and applies the common error mapping,
- retry, timeout, metadata, and compression behavior to the low-level RPC method.
- """
- if kind == _DEFAULT_ASYNC_TRANSPORT_KIND:
- func = grpc_helpers_async.wrap_errors(func)
-
- metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
-
- return functools.wraps(func)(
- _GapicCallable(
- func,
- default_retry,
- default_timeout,
- default_compression,
- metadata=metadata,
- )
- )
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
deleted file mode 100644
index c0c6f64..0000000
--- a/google/api_core/gapic_v1/routing_header.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for constructing routing headers.
-
-These headers are used by Google infrastructure to determine how to route
-requests, especially for services that are regional.
-
-Generally, these headers are specified as gRPC metadata.
-"""
-
-import functools
-from enum import Enum
-from urllib.parse import urlencode
-
-ROUTING_METADATA_KEY = "x-goog-request-params"
-# This is the value for the `maxsize` argument of @functools.lru_cache
-# https://docs.python.org/3/library/functools.html#functools.lru_cache
-# This represents the number of recent function calls to store.
-ROUTING_PARAM_CACHE_SIZE = 32
-
-
-def to_routing_header(params, qualified_enums=True):
- """Returns a routing header string for the given request parameters.
-
- Args:
- params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
- parameters used for routing.
- qualified_enums (bool): Whether to represent enum values
- as their type-qualified symbol names instead of as their
- unqualified symbol names.
-
- Returns:
- str: The routing header string.
- """
- tuples = params.items() if isinstance(params, dict) else params
- if not qualified_enums:
- tuples = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples]
- return "&".join([_urlencode_param(*t) for t in tuples])
-
-
-def to_grpc_metadata(params, qualified_enums=True):
- """Returns the gRPC metadata containing the routing headers for the given
- request parameters.
-
- Args:
- params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
- parameters used for routing.
- qualified_enums (bool): Whether to represent enum values
- as their type-qualified symbol names instead of as their
- unqualified symbol names.
-
- Returns:
- Tuple(str, str): The gRPC metadata containing the routing header key
- and value.
- """
- return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums))
-
-
-# use caching to avoid repeated computation
-@functools.lru_cache(maxsize=ROUTING_PARAM_CACHE_SIZE)
-def _urlencode_param(key, value):
- """Cacheable wrapper over urlencode
-
- Args:
- key (str): The key of the parameter to encode.
- value (str | bytes | Enum): The value of the parameter to encode.
-
- Returns:
- str: The encoded parameter.
- """
- return urlencode(
- {key: value},
- # Per Google API policy (go/api-url-encoding), / is not encoded.
- safe="/",
- )
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
deleted file mode 100644
index 0628229..0000000
--- a/google/api_core/general_helpers.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This import for backward compatibility only.
-from functools import wraps # noqa: F401 pragma: NO COVER
-
-_CREDENTIALS_FILE_WARNING = """\
-The `credentials_file` argument is deprecated because of a potential security risk.
-
-The `google.auth.load_credentials_from_file` method does not validate the credential
-configuration. The security risk occurs when a credential configuration is accepted
-from a source that is not under your control and used without validation on your side.
-
-If you know that you will be loading credential configurations of a
-specific type, it is recommended to use a credential-type-specific
-load method.
-
-This will ensure that an unexpected credential type with potential for
-malicious intent is not loaded unintentionally. You might still have to do
-validation for certain credential types. Please follow the recommendations
-for that method. For example, if you want to load only service accounts,
-you can create the service account credentials explicitly:
-
-```
-from google.cloud.vision_v1 import ImageAnnotatorClient
-from google.oauth2 import service_account
-
-credentials = service_account.Credentials.from_service_account_file(filename)
-client = ImageAnnotatorClient(credentials=credentials)
-```
-
-If you are loading your credential configuration from an untrusted source and have
-not mitigated the risks (e.g. by validating the configuration yourself), make
-these changes as soon as possible to prevent security risks to your environment.
-
-Regardless of the method used, it is always your responsibility to validate
-configurations received from external sources.
-
-Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
-for more details.
-"""
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
deleted file mode 100644
index 30ba19c..0000000
--- a/google/api_core/grpc_helpers.py
+++ /dev/null
@@ -1,614 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for :mod:`grpc`."""
-import collections
-import functools
-from typing import Generic, Iterator, Optional, TypeVar
-import warnings
-
-import google.auth
-import google.auth.credentials
-import google.auth.transport.grpc
-import google.auth.transport.requests
-import google.protobuf
-import grpc
-
-from google.api_core import exceptions, general_helpers
-
-
-# The list of gRPC Callable interfaces that return iterators.
-_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
-
-# denotes the proto response type for grpc calls
-P = TypeVar("P")
-
-
-def _patch_callable_name(callable_):
- """Fix-up gRPC callable attributes.
-
- gRPC callable lack the ``__name__`` attribute which causes
- :func:`functools.wraps` to error. This adds the attribute if needed.
- """
- if not hasattr(callable_, "__name__"):
- callable_.__name__ = callable_.__class__.__name__
-
-
-def _wrap_unary_errors(callable_):
- """Map errors for Unary-Unary and Stream-Unary gRPC callables."""
- _patch_callable_name(callable_)
-
- @functools.wraps(callable_)
- def error_remapped_callable(*args, **kwargs):
- try:
- return callable_(*args, **kwargs)
- except grpc.RpcError as exc:
- raise exceptions.from_grpc_error(exc) from exc
-
- return error_remapped_callable
-
-
-class _StreamingResponseIterator(Generic[P], grpc.Call):
- def __init__(self, wrapped, prefetch_first_result=True):
- self._wrapped = wrapped
-
- # This iterator is used in a retry context, and returned outside after init.
- # gRPC will not throw an exception until the stream is consumed, so we need
- # to retrieve the first result, in order to fail, in order to trigger a retry.
- try:
- if prefetch_first_result:
- self._stored_first_result = next(self._wrapped)
- except TypeError:
- # It is possible the wrapped method isn't an iterable (a grpc.Call
- # for instance). If this happens don't store the first result.
- pass
- except StopIteration:
- # ignore stop iteration at this time. This should be handled outside of retry.
- pass
-
- def __iter__(self) -> Iterator[P]:
- """This iterator is also an iterable that returns itself."""
- return self
-
- def __next__(self) -> P:
- """Get the next response from the stream.
-
- Returns:
- protobuf.Message: A single response from the stream.
- """
- try:
- if hasattr(self, "_stored_first_result"):
- result = self._stored_first_result
- del self._stored_first_result
- return result
- return next(self._wrapped)
- except grpc.RpcError as exc:
- # If the stream has already returned data, we cannot recover here.
- raise exceptions.from_grpc_error(exc) from exc
-
- # grpc.Call & grpc.RpcContext interface
-
- def add_callback(self, callback):
- return self._wrapped.add_callback(callback)
-
- def cancel(self):
- return self._wrapped.cancel()
-
- def code(self):
- return self._wrapped.code()
-
- def details(self):
- return self._wrapped.details()
-
- def initial_metadata(self):
- return self._wrapped.initial_metadata()
-
- def is_active(self):
- return self._wrapped.is_active()
-
- def time_remaining(self):
- return self._wrapped.time_remaining()
-
- def trailing_metadata(self):
- return self._wrapped.trailing_metadata()
-
-
-# public type alias denoting the return type of streaming gapic calls
-GrpcStream = _StreamingResponseIterator[P]
-
-
-def _wrap_stream_errors(callable_):
- """Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
-
- The callables that return iterators require a bit more logic to re-map
- errors when iterating. This wraps both the initial invocation and the
- iterator of the return value to re-map errors.
- """
- _patch_callable_name(callable_)
-
- @functools.wraps(callable_)
- def error_remapped_callable(*args, **kwargs):
- try:
- result = callable_(*args, **kwargs)
- # Auto-fetching the first result causes PubSub client's streaming pull
- # to hang when re-opening the stream, thus we need examine the hacky
- # hidden flag to see if pre-fetching is disabled.
- # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
- prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
- return _StreamingResponseIterator(
- result, prefetch_first_result=prefetch_first
- )
- except grpc.RpcError as exc:
- raise exceptions.from_grpc_error(exc) from exc
-
- return error_remapped_callable
-
-
-def wrap_errors(callable_):
- """Wrap a gRPC callable and map :class:`grpc.RpcErrors` to friendly error
- classes.
-
- Errors raised by the gRPC callable are mapped to the appropriate
- :class:`google.api_core.exceptions.GoogleAPICallError` subclasses.
- The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
- available from the ``response`` property on the mapped exception. This
- is useful for extracting metadata from the original error.
-
- Args:
- callable_ (Callable): A gRPC callable.
-
- Returns:
- Callable: The wrapped gRPC callable.
- """
- if isinstance(callable_, _STREAM_WRAP_CLASSES):
- return _wrap_stream_errors(callable_)
- else:
- return _wrap_unary_errors(callable_)
-
-
-def _create_composite_credentials(
- credentials=None,
- credentials_file=None,
- default_scopes=None,
- scopes=None,
- ssl_credentials=None,
- quota_project_id=None,
- default_host=None,
-):
- """Create the composite credentials for secure channels.
-
- Args:
- credentials (google.auth.credentials.Credentials): The credentials. If
- not specified, then this function will attempt to ascertain the
- credentials from the environment using :func:`google.auth.default`.
- credentials_file (str): Deprecated. A file with credentials that can be loaded with
- :func:`google.auth.load_credentials_from_file`. This argument is
- mutually exclusive with credentials. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- default_scopes (Sequence[str]): A optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- scopes (Sequence[str]): A optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
- credentials. This can be used to specify different certificates.
- quota_project_id (str): An optional project to use for billing and quota.
- default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
-
- Returns:
- grpc.ChannelCredentials: The composed channel credentials object.
-
- Raises:
- google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
- """
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- if credentials and credentials_file:
- raise exceptions.DuplicateCredentialArgs(
- "'credentials' and 'credentials_file' are mutually exclusive."
- )
-
- if credentials_file:
- credentials, _ = google.auth.load_credentials_from_file(
- credentials_file, scopes=scopes, default_scopes=default_scopes
- )
- elif credentials:
- credentials = google.auth.credentials.with_scopes_if_required(
- credentials, scopes=scopes, default_scopes=default_scopes
- )
- else:
- credentials, _ = google.auth.default(
- scopes=scopes, default_scopes=default_scopes
- )
-
- if quota_project_id and isinstance(
- credentials, google.auth.credentials.CredentialsWithQuotaProject
- ):
- credentials = credentials.with_quota_project(quota_project_id)
-
- request = google.auth.transport.requests.Request()
-
- # Create the metadata plugin for inserting the authorization header.
- metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
- credentials,
- request,
- default_host=default_host,
- )
-
- # Create a set of grpc.CallCredentials using the metadata plugin.
- google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
-
- # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of
- # `grpc.compute_engine_channel_credentials` as the former supports passing
- # `ssl_credentials` via `channel_credentials` which is needed for mTLS.
- if ssl_credentials:
- # Combine the ssl credentials and the authorization credentials.
- # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials
- return grpc.composite_channel_credentials(
- ssl_credentials, google_auth_credentials
- )
- else:
- # Use grpc.compute_engine_channel_credentials in order to support Direct Path.
- # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials
- # TODO(https://github.com/googleapis/python-api-core/issues/598):
- # Although `grpc.compute_engine_channel_credentials` returns channel credentials
- # outside of a Google Compute Engine environment (GCE), we should determine if
- # there is a way to reliably detect a GCE environment so that
- # `grpc.compute_engine_channel_credentials` is not called outside of GCE.
- return grpc.compute_engine_channel_credentials(google_auth_credentials)
-
-
-def create_channel(
- target,
- credentials=None,
- scopes=None,
- ssl_credentials=None,
- credentials_file=None,
- quota_project_id=None,
- default_scopes=None,
- default_host=None,
- compression=None,
- attempt_direct_path: Optional[bool] = False,
- **kwargs,
-):
- """Create a secure channel with credentials.
-
- Args:
- target (str): The target service address in the format 'hostname:port'.
- credentials (google.auth.credentials.Credentials): The credentials. If
- not specified, then this function will attempt to ascertain the
- credentials from the environment using :func:`google.auth.default`.
- scopes (Sequence[str]): A optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
- credentials. This can be used to specify different certificates.
- credentials_file (str): A file with credentials that can be loaded with
- :func:`google.auth.load_credentials_from_file`. This argument is
- mutually exclusive with credentials.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- quota_project_id (str): An optional project to use for billing and quota.
- default_scopes (Sequence[str]): Default scopes passed by a Google client
- library. Use 'scopes' for user-defined scopes.
- default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
- compression (grpc.Compression): An optional value indicating the
- compression method to be used over the lifetime of the channel.
- attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
- when the request is made. Direct Path is only available within a Google
- Compute Engine (GCE) environment and provides a proxyless connection
- which increases the available throughput, reduces latency, and increases
- reliability. Note:
-
- - This argument should only be set in a GCE environment and for Services
- that are known to support Direct Path.
- - If this argument is set outside of GCE, then this request will fail
- unless the back-end service happens to have configured fall-back to DNS.
- - If the request causes a `ServiceUnavailable` response, it is recommended
- that the client repeat the request with `attempt_direct_path` set to
- `False` as the Service may not support Direct Path.
- - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
- result in `ValueError` as this combination is not yet supported.
-
- kwargs: Additional key-word args passed to
- :func:`grpc.secure_channel`.
-
- Returns:
- grpc.Channel: The created channel.
-
- Raises:
- google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
- ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
- """
-
- # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
- # raise ValueError as this is not yet supported.
- # See https://github.com/googleapis/python-api-core/issues/590
- if ssl_credentials and attempt_direct_path:
- raise ValueError("Using ssl_credentials with Direct Path is not supported")
-
- composite_credentials = _create_composite_credentials(
- credentials=credentials,
- credentials_file=credentials_file,
- default_scopes=default_scopes,
- scopes=scopes,
- ssl_credentials=ssl_credentials,
- quota_project_id=quota_project_id,
- default_host=default_host,
- )
-
- if attempt_direct_path:
- target = _modify_target_for_direct_path(target)
-
- return grpc.secure_channel(
- target, composite_credentials, compression=compression, **kwargs
- )
-
-
-def _modify_target_for_direct_path(target: str) -> str:
- """
- Given a target, return a modified version which is compatible with Direct Path.
-
- Args:
- target (str): The target service address in the format 'hostname[:port]' or
- 'dns://hostname[:port]'.
-
- Returns:
- target (str): The target service address which is converted into a format compatible with Direct Path.
- If the target contains `dns:///` or does not contain `:///`, the target will be converted in
- a format compatible with Direct Path; otherwise the original target will be returned as the
- original target may already denote Direct Path.
- """
-
- # A DNS prefix may be included with the target to indicate the endpoint is living in the Internet,
- # outside of Google Cloud Platform.
- dns_prefix = "dns:///"
- # Remove "dns:///" if `attempt_direct_path` is set to True as
- # the Direct Path prefix `google-c2p:///` will be used instead.
- target = target.replace(dns_prefix, "")
-
- direct_path_separator = ":///"
- if direct_path_separator not in target:
- target_without_port = target.split(":")[0]
- # Modify the target to use Direct Path by adding the `google-c2p:///` prefix
- target = f"google-c2p{direct_path_separator}{target_without_port}"
- return target
-
-
-_MethodCall = collections.namedtuple(
- "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression")
-)
-
-_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
-
-
-class _CallableStub(object):
- """Stub for the grpc.*MultiCallable interfaces."""
-
- def __init__(self, method, channel):
- self._method = method
- self._channel = channel
- self.response = None
- """Union[protobuf.Message, Callable[protobuf.Message], exception]:
- The response to give when invoking this callable. If this is a
- callable, it will be invoked with the request protobuf. If it's an
- exception, the exception will be raised when this is invoked.
- """
- self.responses = None
- """Iterator[
- Union[protobuf.Message, Callable[protobuf.Message], exception]]:
- An iterator of responses. If specified, self.response will be populated
- on each invocation by calling ``next(self.responses)``."""
- self.requests = []
- """List[protobuf.Message]: All requests sent to this callable."""
- self.calls = []
- """List[Tuple]: All invocations of this callable. Each tuple is the
- request, timeout, metadata, compression, and credentials."""
-
- def __call__(
- self, request, timeout=None, metadata=None, credentials=None, compression=None
- ):
- self._channel.requests.append(_ChannelRequest(self._method, request))
- self.calls.append(
- _MethodCall(request, timeout, metadata, credentials, compression)
- )
- self.requests.append(request)
-
- response = self.response
- if self.responses is not None:
- if response is None:
- response = next(self.responses)
- else:
- raise ValueError(
- "{method}.response and {method}.responses are mutually "
- "exclusive.".format(method=self._method)
- )
-
- if callable(response):
- return response(request)
-
- if isinstance(response, Exception):
- raise response
-
- if response is not None:
- return response
-
- raise ValueError('Method stub for "{}" has no response.'.format(self._method))
-
-
-def _simplify_method_name(method):
- """Simplifies a gRPC method name.
-
- When gRPC invokes the channel to create a callable, it gives a full
- method name like "/google.pubsub.v1.Publisher/CreateTopic". This
- returns just the name of the method, in this case "CreateTopic".
-
- Args:
- method (str): The name of the method.
-
- Returns:
- str: The simplified name of the method.
- """
- return method.rsplit("/", 1).pop()
-
-
-class ChannelStub(grpc.Channel):
- """A testing stub for the grpc.Channel interface.
-
- This can be used to test any client that eventually uses a gRPC channel
- to communicate. By passing in a channel stub, you can configure which
- responses are returned and track which requests are made.
-
- For example:
-
- .. code-block:: python
-
- channel_stub = grpc_helpers.ChannelStub()
- client = FooClient(channel=channel_stub)
-
- channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
-
- foo = client.get_foo(labels=['baz'])
-
- assert foo.name == 'bar'
- assert channel_stub.GetFoo.requests[0].labels = ['baz']
-
- Each method on the stub can be accessed and configured on the channel.
- Here's some examples of various configurations:
-
- .. code-block:: python
-
- # Return a basic response:
-
- channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
- assert client.get_foo().name == 'bar'
-
- # Raise an exception:
- channel_stub.GetFoo.response = NotFound('...')
-
- with pytest.raises(NotFound):
- client.get_foo()
-
- # Use a sequence of responses:
- channel_stub.GetFoo.responses = iter([
- foo_pb2.Foo(name='bar'),
- foo_pb2.Foo(name='baz'),
- ])
-
- assert client.get_foo().name == 'bar'
- assert client.get_foo().name == 'baz'
-
- # Use a callable
-
- def on_get_foo(request):
- return foo_pb2.Foo(name='bar' + request.id)
-
- channel_stub.GetFoo.response = on_get_foo
-
- assert client.get_foo(id='123').name == 'bar123'
- """
-
- def __init__(self, responses=[]):
- self.requests = []
- """Sequence[Tuple[str, protobuf.Message]]: A list of all requests made
- on this channel in order. The tuple is of method name, request
- message."""
- self._method_stubs = {}
-
- def _stub_for_method(self, method):
- method = _simplify_method_name(method)
- self._method_stubs[method] = _CallableStub(method, self)
- return self._method_stubs[method]
-
- def __getattr__(self, key):
- try:
- return self._method_stubs[key]
- except KeyError:
- raise AttributeError
-
- def unary_unary(
- self,
- method,
- request_serializer=None,
- response_deserializer=None,
- _registered_method=False,
- ):
- """grpc.Channel.unary_unary implementation."""
- return self._stub_for_method(method)
-
- def unary_stream(
- self,
- method,
- request_serializer=None,
- response_deserializer=None,
- _registered_method=False,
- ):
- """grpc.Channel.unary_stream implementation."""
- return self._stub_for_method(method)
-
- def stream_unary(
- self,
- method,
- request_serializer=None,
- response_deserializer=None,
- _registered_method=False,
- ):
- """grpc.Channel.stream_unary implementation."""
- return self._stub_for_method(method)
-
- def stream_stream(
- self,
- method,
- request_serializer=None,
- response_deserializer=None,
- _registered_method=False,
- ):
- """grpc.Channel.stream_stream implementation."""
- return self._stub_for_method(method)
-
- def subscribe(self, callback, try_to_connect=False):
- """grpc.Channel.subscribe implementation."""
- pass
-
- def unsubscribe(self, callback):
- """grpc.Channel.unsubscribe implementation."""
- pass
-
- def close(self):
- """grpc.Channel.close implementation."""
- pass
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
deleted file mode 100644
index 9e1ad11..0000000
--- a/google/api_core/grpc_helpers_async.py
+++ /dev/null
@@ -1,348 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""AsyncIO helpers for :mod:`grpc` supporting 3.7+.
-
-Please combine more detailed docstring in grpc_helpers.py to use following
-functions. This module is implementing the same surface with AsyncIO semantics.
-"""
-
-import asyncio
-import functools
-import warnings
-
-from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar
-
-import grpc
-from grpc import aio
-
-from google.api_core import exceptions, general_helpers, grpc_helpers
-
-# denotes the proto response type for grpc calls
-P = TypeVar("P")
-
-# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
-# automatic patching for us. But that means the overhead of creating an
-# extra Python function spreads to every single send and receive.
-
-
-class _WrappedCall(aio.Call):
- def __init__(self):
- self._call = None
-
- def with_call(self, call):
- """Supplies the call object separately to keep __init__ clean."""
- self._call = call
- return self
-
- async def initial_metadata(self):
- return await self._call.initial_metadata()
-
- async def trailing_metadata(self):
- return await self._call.trailing_metadata()
-
- async def code(self):
- return await self._call.code()
-
- async def details(self):
- return await self._call.details()
-
- def cancelled(self):
- return self._call.cancelled()
-
- def done(self):
- return self._call.done()
-
- def time_remaining(self):
- return self._call.time_remaining()
-
- def cancel(self):
- return self._call.cancel()
-
- def add_done_callback(self, callback):
- self._call.add_done_callback(callback)
-
- async def wait_for_connection(self):
- try:
- await self._call.wait_for_connection()
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
-
-class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall):
- def __await__(self) -> Iterator[P]:
- try:
- response = yield from self._call.__await__()
- return response
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
-
-class _WrappedStreamResponseMixin(Generic[P], _WrappedCall):
- def __init__(self):
- self._wrapped_async_generator = None
-
- async def read(self) -> P:
- try:
- return await self._call.read()
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
- async def _wrapped_aiter(self) -> AsyncGenerator[P, None]:
- try:
- # NOTE(lidiz) coverage doesn't understand the exception raised from
- # __anext__ method. It is covered by test case:
- # test_wrap_stream_errors_aiter_non_rpc_error
- async for response in self._call: # pragma: no branch
- yield response
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
- def __aiter__(self) -> AsyncGenerator[P, None]:
- if not self._wrapped_async_generator:
- self._wrapped_async_generator = self._wrapped_aiter()
- return self._wrapped_async_generator
-
-
-class _WrappedStreamRequestMixin(_WrappedCall):
- async def write(self, request):
- try:
- await self._call.write(request)
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
- async def done_writing(self):
- try:
- await self._call.done_writing()
- except grpc.RpcError as rpc_error:
- raise exceptions.from_grpc_error(rpc_error) from rpc_error
-
-
-# NOTE(lidiz) Implementing each individual class separately, so we don't
-# expose any API that should not be seen. E.g., __aiter__ in unary-unary
-# RPC, or __await__ in stream-stream RPC.
-class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall):
- """Wrapped UnaryUnaryCall to map exceptions."""
-
-
-class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall):
- """Wrapped UnaryStreamCall to map exceptions."""
-
-
-class _WrappedStreamUnaryCall(
- _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall
-):
- """Wrapped StreamUnaryCall to map exceptions."""
-
-
-class _WrappedStreamStreamCall(
- _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall
-):
- """Wrapped StreamStreamCall to map exceptions."""
-
-
-# public type alias denoting the return type of async streaming gapic calls
-GrpcAsyncStream = _WrappedStreamResponseMixin
-# public type alias denoting the return type of unary gapic calls
-AwaitableGrpcCall = _WrappedUnaryResponseMixin
-
-
-def _wrap_unary_errors(callable_):
- """Map errors for Unary-Unary async callables."""
-
- @functools.wraps(callable_)
- def error_remapped_callable(*args, **kwargs):
- call = callable_(*args, **kwargs)
- return _WrappedUnaryUnaryCall().with_call(call)
-
- return error_remapped_callable
-
-
-def _wrap_stream_errors(callable_, wrapper_type):
- """Map errors for streaming RPC async callables."""
-
- @functools.wraps(callable_)
- async def error_remapped_callable(*args, **kwargs):
- call = callable_(*args, **kwargs)
- call = wrapper_type().with_call(call)
- await call.wait_for_connection()
- return call
-
- return error_remapped_callable
-
-
-def wrap_errors(callable_):
- """Wrap a gRPC async callable and map :class:`grpc.RpcErrors` to
- friendly error classes.
-
- Errors raised by the gRPC callable are mapped to the appropriate
- :class:`google.api_core.exceptions.GoogleAPICallError` subclasses. The
- original `grpc.RpcError` (which is usually also a `grpc.Call`) is
- available from the ``response`` property on the mapped exception. This
- is useful for extracting metadata from the original error.
-
- Args:
- callable_ (Callable): A gRPC callable.
-
- Returns: Callable: The wrapped gRPC callable.
- """
- grpc_helpers._patch_callable_name(callable_)
-
- if isinstance(callable_, aio.UnaryStreamMultiCallable):
- return _wrap_stream_errors(callable_, _WrappedUnaryStreamCall)
- elif isinstance(callable_, aio.StreamUnaryMultiCallable):
- return _wrap_stream_errors(callable_, _WrappedStreamUnaryCall)
- elif isinstance(callable_, aio.StreamStreamMultiCallable):
- return _wrap_stream_errors(callable_, _WrappedStreamStreamCall)
- else:
- return _wrap_unary_errors(callable_)
-
-
-def create_channel(
- target,
- credentials=None,
- scopes=None,
- ssl_credentials=None,
- credentials_file=None,
- quota_project_id=None,
- default_scopes=None,
- default_host=None,
- compression=None,
- attempt_direct_path: Optional[bool] = False,
- **kwargs,
-):
- """Create an AsyncIO secure channel with credentials.
-
- Args:
- target (str): The target service address in the format 'hostname:port'.
- credentials (google.auth.credentials.Credentials): The credentials. If
- not specified, then this function will attempt to ascertain the
- credentials from the environment using :func:`google.auth.default`.
- scopes (Sequence[str]): A optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
- credentials. This can be used to specify different certificates.
- credentials_file (str): Deprecated. A file with credentials that can be loaded with
- :func:`google.auth.load_credentials_from_file`. This argument is
- mutually exclusive with credentials. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- quota_project_id (str): An optional project to use for billing and quota.
- default_scopes (Sequence[str]): Default scopes passed by a Google client
- library. Use 'scopes' for user-defined scopes.
- default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
- compression (grpc.Compression): An optional value indicating the
- compression method to be used over the lifetime of the channel.
- attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
- when the request is made. Direct Path is only available within a Google
- Compute Engine (GCE) environment and provides a proxyless connection
- which increases the available throughput, reduces latency, and increases
- reliability. Note:
-
- - This argument should only be set in a GCE environment and for Services
- that are known to support Direct Path.
- - If this argument is set outside of GCE, then this request will fail
- unless the back-end service happens to have configured fall-back to DNS.
- - If the request causes a `ServiceUnavailable` response, it is recommended
- that the client repeat the request with `attempt_direct_path` set to
- `False` as the Service may not support Direct Path.
- - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
- result in `ValueError` as this combination is not yet supported.
-
- kwargs: Additional key-word args passed to :func:`aio.secure_channel`.
-
- Returns:
- aio.Channel: The created channel.
-
- Raises:
- google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
- ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
- """
-
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
- # raise ValueError as this is not yet supported.
- # See https://github.com/googleapis/python-api-core/issues/590
- if ssl_credentials and attempt_direct_path:
- raise ValueError("Using ssl_credentials with Direct Path is not supported")
-
- composite_credentials = grpc_helpers._create_composite_credentials(
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes,
- default_scopes=default_scopes,
- ssl_credentials=ssl_credentials,
- quota_project_id=quota_project_id,
- default_host=default_host,
- )
-
- if attempt_direct_path:
- target = grpc_helpers._modify_target_for_direct_path(target)
-
- return aio.secure_channel(
- target, composite_credentials, compression=compression, **kwargs
- )
-
-
-class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
- """Fake implementation for unary-unary RPCs.
-
- It is a dummy object for response message. Supply the intended response
- upon the initialization, and the coroutine will return the exact response
- message.
- """
-
- def __init__(self, response=object()):
- self.response = response
- self._future = asyncio.get_event_loop().create_future()
- self._future.set_result(self.response)
-
- def __await__(self):
- response = yield from self._future.__await__()
- return response
-
-
-class FakeStreamUnaryCall(_WrappedStreamUnaryCall):
- """Fake implementation for stream-unary RPCs.
-
- It is a dummy object for response message. Supply the intended response
- upon the initialization, and the coroutine will return the exact response
- message.
- """
-
- def __init__(self, response=object()):
- self.response = response
- self._future = asyncio.get_event_loop().create_future()
- self._future.set_result(self.response)
-
- def __await__(self):
- response = yield from self._future.__await__()
- return response
-
- async def wait_for_connection(self):
- pass
diff --git a/google/api_core/iam.py b/google/api_core/iam.py
deleted file mode 100644
index 4437c70..0000000
--- a/google/api_core/iam.py
+++ /dev/null
@@ -1,427 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Non-API-specific IAM policy definitions
-
-For allowed roles / permissions, see:
-https://cloud.google.com/iam/docs/understanding-roles
-
-Example usage:
-
-.. code-block:: python
-
- # ``get_iam_policy`` returns a :class:'~google.api_core.iam.Policy`.
- policy = resource.get_iam_policy(requested_policy_version=3)
-
- phred = "user:phred@example.com"
- admin_group = "group:admins@groups.example.com"
- account = "serviceAccount:account-1234@accounts.example.com"
-
- policy.version = 3
- policy.bindings = [
- {
- "role": "roles/owner",
- "members": {phred, admin_group, account}
- },
- {
- "role": "roles/editor",
- "members": {"allAuthenticatedUsers"}
- },
- {
- "role": "roles/viewer",
- "members": {"allUsers"}
- "condition": {
- "title": "request_time",
- "description": "Requests made before 2021-01-01T00:00:00Z",
- "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
- }
- }
- ]
-
- resource.set_iam_policy(policy)
-"""
-
-import collections
-import collections.abc
-import operator
-import warnings
-
-# Generic IAM roles
-
-OWNER_ROLE = "roles/owner"
-"""Generic role implying all rights to an object."""
-
-EDITOR_ROLE = "roles/editor"
-"""Generic role implying rights to modify an object."""
-
-VIEWER_ROLE = "roles/viewer"
-"""Generic role implying rights to access an object."""
-
-_ASSIGNMENT_DEPRECATED_MSG = """\
-Assigning to '{}' is deprecated. Use the `policy.bindings` property to modify bindings instead."""
-
-_DICT_ACCESS_MSG = """\
-Dict access is not supported on policies with version > 1 or with conditional bindings."""
-
-
-class InvalidOperationException(Exception):
- """Raised when trying to use Policy class as a dict."""
-
- pass
-
-
-class Policy(collections.abc.MutableMapping):
- """IAM Policy
-
- Args:
- etag (Optional[str]): ETag used to identify a unique of the policy
- version (Optional[int]): The syntax schema version of the policy.
-
- Note:
- Using conditions in bindings requires the policy's version to be set
- to `3` or greater, depending on the versions that are currently supported.
-
- Accessing the policy using dict operations will raise InvalidOperationException
- when the policy's version is set to 3.
-
- Use the policy.bindings getter/setter to retrieve and modify the policy's bindings.
-
- See:
- IAM Policy https://cloud.google.com/iam/reference/rest/v1/Policy
- Policy versions https://cloud.google.com/iam/docs/policies#versions
- Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
- """
-
- _OWNER_ROLES = (OWNER_ROLE,)
- """Roles mapped onto our ``owners`` attribute."""
-
- _EDITOR_ROLES = (EDITOR_ROLE,)
- """Roles mapped onto our ``editors`` attribute."""
-
- _VIEWER_ROLES = (VIEWER_ROLE,)
- """Roles mapped onto our ``viewers`` attribute."""
-
- def __init__(self, etag=None, version=None):
- self.etag = etag
- self.version = version
- self._bindings = []
-
- def __iter__(self):
- self.__check_version__()
- # Exclude bindings with no members
- return (binding["role"] for binding in self._bindings if binding["members"])
-
- def __len__(self):
- self.__check_version__()
- # Exclude bindings with no members
- return len(list(self.__iter__()))
-
- def __getitem__(self, key):
- self.__check_version__()
- for b in self._bindings:
- if b["role"] == key:
- return b["members"]
- # If the binding does not yet exist, create one
- # NOTE: This will create bindings with no members
- # which are ignored by __iter__ and __len__
- new_binding = {"role": key, "members": set()}
- self._bindings.append(new_binding)
- return new_binding["members"]
-
- def __setitem__(self, key, value):
- self.__check_version__()
- value = set(value)
- for binding in self._bindings:
- if binding["role"] == key:
- binding["members"] = value
- return
- self._bindings.append({"role": key, "members": value})
-
- def __delitem__(self, key):
- self.__check_version__()
- for b in self._bindings:
- if b["role"] == key:
- self._bindings.remove(b)
- return
- raise KeyError(key)
-
- def __check_version__(self):
- """Raise InvalidOperationException if version is greater than 1 or policy contains conditions."""
- raise_version = self.version is not None and self.version > 1
-
- if raise_version or self._contains_conditions():
- raise InvalidOperationException(_DICT_ACCESS_MSG)
-
- def _contains_conditions(self):
- for b in self._bindings:
- if b.get("condition") is not None:
- return True
- return False
-
- @property
- def bindings(self):
- """The policy's list of bindings.
-
- A binding is specified by a dictionary with keys:
-
- * role (str): Role that is assigned to `members`.
-
- * members (:obj:`set` of str): Specifies the identities associated to this binding.
-
- * condition (:obj:`dict` of str:str): Specifies a condition under which this binding will apply.
-
- * title (str): Title for the condition.
-
- * description (:obj:str, optional): Description of the condition.
-
- * expression: A CEL expression.
-
- Type:
- :obj:`list` of :obj:`dict`
-
- See:
- Policy versions https://cloud.google.com/iam/docs/policies#versions
- Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
-
- Example:
-
- .. code-block:: python
-
- USER = "user:phred@example.com"
- ADMIN_GROUP = "group:admins@groups.example.com"
- SERVICE_ACCOUNT = "serviceAccount:account-1234@accounts.example.com"
- CONDITION = {
- "title": "request_time",
- "description": "Requests made before 2021-01-01T00:00:00Z", # Optional
- "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
- }
-
- # Set policy's version to 3 before setting bindings containing conditions.
- policy.version = 3
-
- policy.bindings = [
- {
- "role": "roles/viewer",
- "members": {USER, ADMIN_GROUP, SERVICE_ACCOUNT},
- "condition": CONDITION
- },
- ...
- ]
- """
- return self._bindings
-
- @bindings.setter
- def bindings(self, bindings):
- self._bindings = bindings
-
- @property
- def owners(self):
- """Legacy access to owner role.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to access bindings instead.
- """
- result = set()
- for role in self._OWNER_ROLES:
- for member in self.get(role, ()):
- result.add(member)
- return frozenset(result)
-
- @owners.setter
- def owners(self, value):
- """Update owners.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to access bindings instead.
- """
- warnings.warn(
- _ASSIGNMENT_DEPRECATED_MSG.format("owners", OWNER_ROLE), DeprecationWarning
- )
- self[OWNER_ROLE] = value
-
- @property
- def editors(self):
- """Legacy access to editor role.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to access bindings instead.
- """
- result = set()
- for role in self._EDITOR_ROLES:
- for member in self.get(role, ()):
- result.add(member)
- return frozenset(result)
-
- @editors.setter
- def editors(self, value):
- """Update editors.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to modify bindings instead.
- """
- warnings.warn(
- _ASSIGNMENT_DEPRECATED_MSG.format("editors", EDITOR_ROLE),
- DeprecationWarning,
- )
- self[EDITOR_ROLE] = value
-
- @property
- def viewers(self):
- """Legacy access to viewer role.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to modify bindings instead.
- """
- result = set()
- for role in self._VIEWER_ROLES:
- for member in self.get(role, ()):
- result.add(member)
- return frozenset(result)
-
- @viewers.setter
- def viewers(self, value):
- """Update viewers.
-
- Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
-
- DEPRECATED: use `policy.bindings` to modify bindings instead.
- """
- warnings.warn(
- _ASSIGNMENT_DEPRECATED_MSG.format("viewers", VIEWER_ROLE),
- DeprecationWarning,
- )
- self[VIEWER_ROLE] = value
-
- @staticmethod
- def user(email):
- """Factory method for a user member.
-
- Args:
- email (str): E-mail for this particular user.
-
- Returns:
- str: A member string corresponding to the given user.
- """
- return "user:%s" % (email,)
-
- @staticmethod
- def service_account(email):
- """Factory method for a service account member.
-
- Args:
- email (str): E-mail for this particular service account.
-
- Returns:
- str: A member string corresponding to the given service account.
-
- """
- return "serviceAccount:%s" % (email,)
-
- @staticmethod
- def group(email):
- """Factory method for a group member.
-
- Args:
- email (str): An id or e-mail for this particular group.
-
- Returns:
- str: A member string corresponding to the given group.
- """
- return "group:%s" % (email,)
-
- @staticmethod
- def domain(domain):
- """Factory method for a domain member.
-
- Args:
- domain (str): The domain for this member.
-
- Returns:
- str: A member string corresponding to the given domain.
- """
- return "domain:%s" % (domain,)
-
- @staticmethod
- def all_users():
- """Factory method for a member representing all users.
-
- Returns:
- str: A member string representing all users.
- """
- return "allUsers"
-
- @staticmethod
- def authenticated_users():
- """Factory method for a member representing all authenticated users.
-
- Returns:
- str: A member string representing all authenticated users.
- """
- return "allAuthenticatedUsers"
-
- @classmethod
- def from_api_repr(cls, resource):
- """Factory: create a policy from a JSON resource.
-
- Args:
- resource (dict): policy resource returned by ``getIamPolicy`` API.
-
- Returns:
- :class:`Policy`: the parsed policy
- """
- version = resource.get("version")
- etag = resource.get("etag")
- policy = cls(etag, version)
- policy.bindings = resource.get("bindings", [])
-
- for binding in policy.bindings:
- binding["members"] = set(binding.get("members", ()))
-
- return policy
-
- def to_api_repr(self):
- """Render a JSON policy resource.
-
- Returns:
- dict: a resource to be passed to the ``setIamPolicy`` API.
- """
- resource = {}
-
- if self.etag is not None:
- resource["etag"] = self.etag
-
- if self.version is not None:
- resource["version"] = self.version
-
- if self._bindings and len(self._bindings) > 0:
- bindings = []
- for binding in self._bindings:
- members = binding.get("members")
- if members:
- new_binding = {"role": binding["role"], "members": sorted(members)}
- condition = binding.get("condition")
- if condition:
- new_binding["condition"] = condition
- bindings.append(new_binding)
-
- if bindings:
- # Sort bindings by role
- key = operator.itemgetter("role")
- resource["bindings"] = sorted(bindings, key=key)
-
- return resource
diff --git a/google/api_core/operation.py b/google/api_core/operation.py
deleted file mode 100644
index 5206243..0000000
--- a/google/api_core/operation.py
+++ /dev/null
@@ -1,365 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Futures for long-running operations returned from Google Cloud APIs.
-
-These futures can be used to synchronously wait for the result of a
-long-running operation using :meth:`Operation.result`:
-
-
-.. code-block:: python
-
- operation = my_api_client.long_running_method()
- result = operation.result()
-
-Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
-
-.. code-block:: python
-
- operation = my_api_client.long_running_method()
-
- def my_callback(future):
- result = future.result()
-
- operation.add_done_callback(my_callback)
-
-"""
-
-import functools
-import threading
-
-from google.api_core import exceptions
-from google.api_core import protobuf_helpers
-from google.api_core.future import polling
-from google.longrunning import operations_pb2
-from google.protobuf import json_format
-from google.rpc import code_pb2
-
-
-class Operation(polling.PollingFuture):
- """A Future for interacting with a Google API Long-Running Operation.
-
- Args:
- operation (google.longrunning.operations_pb2.Operation): The
- initial operation.
- refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
- returns the latest state of the operation.
- cancel (Callable[[], None]): A callable that tries to cancel
- the operation.
- result_type (func:`type`): The protobuf type for the operation's
- result.
- metadata_type (func:`type`): The protobuf type for the operation's
- metadata.
- polling (google.api_core.retry.Retry): The configuration used for polling.
- This parameter controls how often :meth:`done` is polled. If the
- ``timeout`` argument is specified in the :meth:`result` method, it will
- override the ``polling.timeout`` property.
- retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead.
- If specified it will override ``polling`` parameter to maintain
- backward compatibility.
- """
-
- def __init__(
- self,
- operation,
- refresh,
- cancel,
- result_type,
- metadata_type=None,
- polling=polling.DEFAULT_POLLING,
- **kwargs,
- ):
- super(Operation, self).__init__(polling=polling, **kwargs)
- self._operation = operation
- self._refresh = refresh
- self._cancel = cancel
- self._result_type = result_type
- self._metadata_type = metadata_type
- self._completion_lock = threading.Lock()
- # Invoke this in case the operation came back already complete.
- self._set_result_from_operation()
-
- @property
- def operation(self):
- """google.longrunning.Operation: The current long-running operation."""
- return self._operation
-
- @property
- def metadata(self):
- """google.protobuf.Message: the current operation metadata."""
- if not self._operation.HasField("metadata"):
- return None
-
- return protobuf_helpers.from_any_pb(
- self._metadata_type, self._operation.metadata
- )
-
- @classmethod
- def deserialize(self, payload):
- """Deserialize a ``google.longrunning.Operation`` protocol buffer.
-
- Args:
- payload (bytes): A serialized operation protocol buffer.
-
- Returns:
- ~.operations_pb2.Operation: An Operation protobuf object.
- """
- return operations_pb2.Operation.FromString(payload)
-
- def _set_result_from_operation(self):
- """Set the result or exception from the operation if it is complete."""
- # This must be done in a lock to prevent the polling thread
- # and main thread from both executing the completion logic
- # at the same time.
- with self._completion_lock:
- # If the operation isn't complete or if the result has already been
- # set, do not call set_result/set_exception again.
- # Note: self._result_set is set to True in set_result and
- # set_exception, in case those methods are invoked directly.
- if not self._operation.done or self._result_set:
- return
-
- if self._operation.HasField("response"):
- response = protobuf_helpers.from_any_pb(
- self._result_type, self._operation.response
- )
- self.set_result(response)
- elif self._operation.HasField("error"):
- exception = exceptions.from_grpc_status(
- status_code=self._operation.error.code,
- message=self._operation.error.message,
- errors=(self._operation.error,),
- response=self._operation,
- )
- self.set_exception(exception)
- else:
- exception = exceptions.GoogleAPICallError(
- "Unexpected state: Long-running operation had neither "
- "response nor error set."
- )
- self.set_exception(exception)
-
- def _refresh_and_update(self, retry=None):
- """Refresh the operation and update the result if needed.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
- """
- # If the currently cached operation is done, no need to make another
- # RPC as it will not change once done.
- if not self._operation.done:
- self._operation = self._refresh(retry=retry) if retry else self._refresh()
- self._set_result_from_operation()
-
- def done(self, retry=None):
- """Checks to see if the operation is complete.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-
- Returns:
- bool: True if the operation is complete, False otherwise.
- """
- self._refresh_and_update(retry)
- return self._operation.done
-
- def cancel(self):
- """Attempt to cancel the operation.
-
- Returns:
- bool: True if the cancel RPC was made, False if the operation is
- already complete.
- """
- if self.done():
- return False
-
- self._cancel()
- return True
-
- def cancelled(self):
- """True if the operation was cancelled."""
- self._refresh_and_update()
- return (
- self._operation.HasField("error")
- and self._operation.error.code == code_pb2.CANCELLED
- )
-
-
-def _refresh_http(api_request, operation_name, retry=None):
- """Refresh an operation using a JSON/HTTP client.
-
- Args:
- api_request (Callable): A callable used to make an API request. This
- should generally be
- :meth:`google.cloud._http.Connection.api_request`.
- operation_name (str): The name of the operation.
- retry (google.api_core.retry.Retry): (Optional) retry policy
-
- Returns:
- google.longrunning.operations_pb2.Operation: The operation.
- """
- path = "operations/{}".format(operation_name)
-
- if retry is not None:
- api_request = retry(api_request)
-
- api_response = api_request(method="GET", path=path)
- return json_format.ParseDict(api_response, operations_pb2.Operation())
-
-
-def _cancel_http(api_request, operation_name):
- """Cancel an operation using a JSON/HTTP client.
-
- Args:
- api_request (Callable): A callable used to make an API request. This
- should generally be
- :meth:`google.cloud._http.Connection.api_request`.
- operation_name (str): The name of the operation.
- """
- path = "operations/{}:cancel".format(operation_name)
- api_request(method="POST", path=path)
-
-
-def from_http_json(operation, api_request, result_type, **kwargs):
- """Create an operation future using a HTTP/JSON client.
-
- This interacts with the long-running operations `service`_ (specific
- to a given API) via `HTTP/JSON`_.
-
- .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
- v1beta1/operations#Operation
-
- Args:
- operation (dict): Operation as a dictionary.
- api_request (Callable): A callable used to make an API request. This
- should generally be
- :meth:`google.cloud._http.Connection.api_request`.
- result_type (:func:`type`): The protobuf result type.
- kwargs: Keyword args passed into the :class:`Operation` constructor.
-
- Returns:
- ~.api_core.operation.Operation: The operation future to track the given
- operation.
- """
- operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
- refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
- cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
- return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
-
-
-def _refresh_grpc(operations_stub, operation_name, retry=None):
- """Refresh an operation using a gRPC client.
-
- Args:
- operations_stub (google.longrunning.operations_pb2.OperationsStub):
- The gRPC operations stub.
- operation_name (str): The name of the operation.
- retry (google.api_core.retry.Retry): (Optional) retry policy
-
- Returns:
- google.longrunning.operations_pb2.Operation: The operation.
- """
- request_pb = operations_pb2.GetOperationRequest(name=operation_name)
-
- rpc = operations_stub.GetOperation
- if retry is not None:
- rpc = retry(rpc)
-
- return rpc(request_pb)
-
-
-def _cancel_grpc(operations_stub, operation_name):
- """Cancel an operation using a gRPC client.
-
- Args:
- operations_stub (google.longrunning.operations_pb2.OperationsStub):
- The gRPC operations stub.
- operation_name (str): The name of the operation.
- """
- request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
- operations_stub.CancelOperation(request_pb)
-
-
-def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwargs):
- """Create an operation future using a gRPC client.
-
- This interacts with the long-running operations `service`_ (specific
- to a given API) via gRPC.
-
- .. _service: https://github.com/googleapis/googleapis/blob/\
- 050400df0fdb16f63b63e9dee53819044bffc857/\
- google/longrunning/operations.proto#L38
-
- Args:
- operation (google.longrunning.operations_pb2.Operation): The operation.
- operations_stub (google.longrunning.operations_pb2.OperationsStub):
- The operations stub.
- result_type (:func:`type`): The protobuf result type.
- grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
- to the rpc.
- kwargs: Keyword args passed into the :class:`Operation` constructor.
-
- Returns:
- ~.api_core.operation.Operation: The operation future to track the given
- operation.
- """
- refresh = functools.partial(
- _refresh_grpc,
- operations_stub,
- operation.name,
- metadata=grpc_metadata,
- )
- cancel = functools.partial(
- _cancel_grpc,
- operations_stub,
- operation.name,
- metadata=grpc_metadata,
- )
- return Operation(operation, refresh, cancel, result_type, **kwargs)
-
-
-def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
- """Create an operation future from a gapic client.
-
- This interacts with the long-running operations `service`_ (specific
- to a given API) via a gapic client.
-
- .. _service: https://github.com/googleapis/googleapis/blob/\
- 050400df0fdb16f63b63e9dee53819044bffc857/\
- google/longrunning/operations.proto#L38
-
- Args:
- operation (google.longrunning.operations_pb2.Operation): The operation.
- operations_client (google.api_core.operations_v1.OperationsClient):
- The operations client.
- result_type (:func:`type`): The protobuf result type.
- grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
- to the rpc.
- kwargs: Keyword args passed into the :class:`Operation` constructor.
-
- Returns:
- ~.api_core.operation.Operation: The operation future to track the given
- operation.
- """
- refresh = functools.partial(
- operations_client.get_operation,
- operation.name,
- metadata=grpc_metadata,
- )
- cancel = functools.partial(
- operations_client.cancel_operation,
- operation.name,
- metadata=grpc_metadata,
- )
- return Operation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operation_async.py b/google/api_core/operation_async.py
deleted file mode 100644
index 2fd341d..0000000
--- a/google/api_core/operation_async.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""AsyncIO futures for long-running operations returned from Google Cloud APIs.
-
-These futures can be used to await for the result of a long-running operation
-using :meth:`AsyncOperation.result`:
-
-
-.. code-block:: python
-
- operation = my_api_client.long_running_method()
- result = await operation.result()
-
-Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
-
-.. code-block:: python
-
- operation = my_api_client.long_running_method()
-
- def my_callback(future):
- result = await future.result()
-
- operation.add_done_callback(my_callback)
-
-"""
-
-import functools
-import threading
-
-from google.api_core import exceptions
-from google.api_core import protobuf_helpers
-from google.api_core.future import async_future
-from google.longrunning import operations_pb2
-from google.rpc import code_pb2
-
-
-class AsyncOperation(async_future.AsyncFuture):
- """A Future for interacting with a Google API Long-Running Operation.
-
- Args:
- operation (google.longrunning.operations_pb2.Operation): The
- initial operation.
- refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
- returns the latest state of the operation.
- cancel (Callable[[], None]): A callable that tries to cancel
- the operation.
- result_type (func:`type`): The protobuf type for the operation's
- result.
- metadata_type (func:`type`): The protobuf type for the operation's
- metadata.
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
- """
-
- def __init__(
- self,
- operation,
- refresh,
- cancel,
- result_type,
- metadata_type=None,
- retry=async_future.DEFAULT_RETRY,
- ):
- super().__init__(retry=retry)
- self._operation = operation
- self._refresh = refresh
- self._cancel = cancel
- self._result_type = result_type
- self._metadata_type = metadata_type
- self._completion_lock = threading.Lock()
- # Invoke this in case the operation came back already complete.
- self._set_result_from_operation()
-
- @property
- def operation(self):
- """google.longrunning.Operation: The current long-running operation."""
- return self._operation
-
- @property
- def metadata(self):
- """google.protobuf.Message: the current operation metadata."""
- if not self._operation.HasField("metadata"):
- return None
-
- return protobuf_helpers.from_any_pb(
- self._metadata_type, self._operation.metadata
- )
-
- @classmethod
- def deserialize(cls, payload):
- """Deserialize a ``google.longrunning.Operation`` protocol buffer.
-
- Args:
- payload (bytes): A serialized operation protocol buffer.
-
- Returns:
- ~.operations_pb2.Operation: An Operation protobuf object.
- """
- return operations_pb2.Operation.FromString(payload)
-
- def _set_result_from_operation(self):
- """Set the result or exception from the operation if it is complete."""
- # This must be done in a lock to prevent the async_future thread
- # and main thread from both executing the completion logic
- # at the same time.
- with self._completion_lock:
- # If the operation isn't complete or if the result has already been
- # set, do not call set_result/set_exception again.
- if not self._operation.done or self._future.done():
- return
-
- if self._operation.HasField("response"):
- response = protobuf_helpers.from_any_pb(
- self._result_type, self._operation.response
- )
- self.set_result(response)
- elif self._operation.HasField("error"):
- exception = exceptions.GoogleAPICallError(
- self._operation.error.message,
- errors=(self._operation.error,),
- response=self._operation,
- )
- self.set_exception(exception)
- else:
- exception = exceptions.GoogleAPICallError(
- "Unexpected state: Long-running operation had neither "
- "response nor error set."
- )
- self.set_exception(exception)
-
- async def _refresh_and_update(self, retry=async_future.DEFAULT_RETRY):
- """Refresh the operation and update the result if needed.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
- """
- # If the currently cached operation is done, no need to make another
- # RPC as it will not change once done.
- if not self._operation.done:
- self._operation = await self._refresh(retry=retry)
- self._set_result_from_operation()
-
- async def done(self, retry=async_future.DEFAULT_RETRY):
- """Checks to see if the operation is complete.
-
- Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-
- Returns:
- bool: True if the operation is complete, False otherwise.
- """
- await self._refresh_and_update(retry)
- return self._operation.done
-
- async def cancel(self):
- """Attempt to cancel the operation.
-
- Returns:
- bool: True if the cancel RPC was made, False if the operation is
- already complete.
- """
- result = await self.done()
- if result:
- return False
- else:
- await self._cancel()
- return True
-
- async def cancelled(self):
- """True if the operation was cancelled."""
- await self._refresh_and_update()
- return (
- self._operation.HasField("error")
- and self._operation.error.code == code_pb2.CANCELLED
- )
-
-
-def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
- """Create an operation future from a gapic client.
-
- This interacts with the long-running operations `service`_ (specific
- to a given API) via a gapic client.
-
- .. _service: https://github.com/googleapis/googleapis/blob/\
- 050400df0fdb16f63b63e9dee53819044bffc857/\
- google/longrunning/operations.proto#L38
-
- Args:
- operation (google.longrunning.operations_pb2.Operation): The operation.
- operations_client (google.api_core.operations_v1.OperationsClient):
- The operations client.
- result_type (:func:`type`): The protobuf result type.
- grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
- to the rpc.
- kwargs: Keyword args passed into the :class:`Operation` constructor.
-
- Returns:
- ~.api_core.operation.Operation: The operation future to track the given
- operation.
- """
- refresh = functools.partial(
- operations_client.get_operation,
- operation.name,
- metadata=grpc_metadata,
- )
- cancel = functools.partial(
- operations_client.cancel_operation,
- operation.name,
- metadata=grpc_metadata,
- )
- return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
deleted file mode 100644
index 4db32a4..0000000
--- a/google/api_core/operations_v1/__init__.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Package for interacting with the google.longrunning.operations meta-API."""
-
-from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
-from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
-from google.api_core.operations_v1.operations_client import OperationsClient
-from google.api_core.operations_v1.transports.rest import OperationsRestTransport
-
-__all__ = [
- "AbstractOperationsClient",
- "OperationsAsyncClient",
- "OperationsClient",
- "OperationsRestTransport"
-]
-
-try:
- from google.api_core.operations_v1.transports.rest_asyncio import (
- AsyncOperationsRestTransport,
- )
- from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient
-
- __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"]
-except ImportError:
- # This import requires the `async_rest` extra.
- # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
- # as other transports are still available.
- pass
diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py
deleted file mode 100644
index f62f60b..0000000
--- a/google/api_core/operations_v1/abstract_operations_base_client.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Optional, Type, Union
-
-from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core.operations_v1.transports.base import (
- DEFAULT_CLIENT_INFO,
- OperationsTransport,
-)
-from google.api_core.operations_v1.transports.rest import OperationsRestTransport
-
-try:
- from google.api_core.operations_v1.transports.rest_asyncio import (
- AsyncOperationsRestTransport,
- )
-
- HAS_ASYNC_REST_DEPENDENCIES = True
-except ImportError as e:
- HAS_ASYNC_REST_DEPENDENCIES = False
- ASYNC_REST_EXCEPTION = e
-
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
-from google.auth.transport import mtls # type: ignore
-
-
-class AbstractOperationsBaseClientMeta(type):
- """Metaclass for the Operations Base client.
-
- This provides base class-level methods for building and retrieving
- support objects (e.g. transport) without polluting the client instance
- objects.
- """
-
- _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
- _transport_registry["rest"] = OperationsRestTransport
- if HAS_ASYNC_REST_DEPENDENCIES:
- _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport
-
- def get_transport_class(
- cls,
- label: Optional[str] = None,
- ) -> Type[OperationsTransport]:
- """Returns an appropriate transport class.
-
- Args:
- label: The name of the desired transport. If none is
- provided, then the first transport in the registry is used.
-
- Returns:
- The transport class to use.
- """
- # If a specific transport is requested, return that one.
- if (
- label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES
- ): # pragma: NO COVER
- raise ASYNC_REST_EXCEPTION
-
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta):
- """Manages long-running operations with an API service.
-
- When an API method normally takes long time to complete, it can be
- designed to return [Operation][google.api_core.operations_v1.Operation] to the
- client, and the client can use this interface to receive the real
- response asynchronously by polling the operation resource, or pass
- the operation resource to another API (such as Google Cloud Pub/Sub
- API) to receive the response. Any API service that returns
- long-running operations should implement the ``Operations``
- interface so developers can have a consistent client experience.
- """
-
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- DEFAULT_ENDPOINT = "longrunning.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
- DEFAULT_ENDPOINT
- )
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """
- This class method should be overridden by the subclasses.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Raises:
- NotImplementedError: If the method is called on the base class.
- """
- raise NotImplementedError("`from_service_account_info` is not implemented.")
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """
- This class method should be overridden by the subclasses.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Raises:
- NotImplementedError: If the method is called on the base class.
- """
- raise NotImplementedError("`from_service_account_file` is not implemented.")
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> OperationsTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- OperationsTransport: The transport used by the client
- instance.
- """
- return self._transport
-
- @staticmethod
- def common_billing_account_path(
- billing_account: str,
- ) -> str:
- """Returns a fully-qualified billing_account string."""
- return "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
-
- @staticmethod
- def parse_common_billing_account_path(path: str) -> Dict[str, str]:
- """Parse a billing_account path into its component segments."""
- m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_folder_path(
- folder: str,
- ) -> str:
- """Returns a fully-qualified folder string."""
- return "folders/{folder}".format(
- folder=folder,
- )
-
- @staticmethod
- def parse_common_folder_path(path: str) -> Dict[str, str]:
- """Parse a folder path into its component segments."""
- m = re.match(r"^folders/(?P<folder>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_organization_path(
- organization: str,
- ) -> str:
- """Returns a fully-qualified organization string."""
- return "organizations/{organization}".format(
- organization=organization,
- )
-
- @staticmethod
- def parse_common_organization_path(path: str) -> Dict[str, str]:
- """Parse a organization path into its component segments."""
- m = re.match(r"^organizations/(?P<organization>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_project_path(
- project: str,
- ) -> str:
- """Returns a fully-qualified project string."""
- return "projects/{project}".format(
- project=project,
- )
-
- @staticmethod
- def parse_common_project_path(path: str) -> Dict[str, str]:
- """Parse a project path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_location_path(
- project: str,
- location: str,
- ) -> str:
- """Returns a fully-qualified location string."""
- return "projects/{project}/locations/{location}".format(
- project=project,
- location=location,
- )
-
- @staticmethod
- def parse_common_location_path(path: str) -> Dict[str, str]:
- """Parse a location path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
- def __init__(
- self,
- *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, OperationsTransport, None] = None,
- client_options: Optional[client_options_lib.ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the operations client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Union[str, OperationsTransport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
-
- # Create SSL credentials for mutual TLS if needed.
- if hasattr(mtls, "should_use_client_cert"):
- use_client_cert = mtls.should_use_client_cert()
- else:
- # if unsupported, fallback to reading from env var
- use_client_cert_str = os.getenv(
- "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
- ).lower()
- if use_client_cert_str not in ("true", "false"):
- raise ValueError(
- "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
- " either `true` or `false`"
- )
- use_client_cert = use_client_cert_str == "true"
- client_cert_source_func = None
- is_mtls = False
- if use_client_cert:
- if client_options.client_cert_source:
- is_mtls = True
- client_cert_source_func = client_options.client_cert_source
- else:
- is_mtls = mtls.has_default_client_cert_source()
- if is_mtls:
- client_cert_source_func = mtls.default_client_cert_source()
- else:
- client_cert_source_func = None
-
- # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None:
- api_endpoint = client_options.api_endpoint
- else:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
- if use_mtls_env == "never":
- api_endpoint = self.DEFAULT_ENDPOINT
- elif use_mtls_env == "always":
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- elif use_mtls_env == "auto":
- if is_mtls:
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- else:
- api_endpoint = self.DEFAULT_ENDPOINT
- else:
- raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
- "values: never, auto, always"
- )
-
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- if isinstance(transport, OperationsTransport):
- # transport is a OperationsTransport instance.
- if credentials or client_options.credentials_file:
- raise ValueError(
- "When providing a transport instance, "
- "provide its credentials directly."
- )
- if client_options.scopes:
- raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
- )
- self._transport = transport
- else:
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
- credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
- client_info=client_info,
- always_use_jwt_access=True,
- )
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
deleted file mode 100644
index fc44536..0000000
--- a/google/api_core/operations_v1/abstract_operations_client.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import Optional, Sequence, Tuple, Union
-
-from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core import retry as retries # type: ignore
-from google.api_core.operations_v1 import pagers
-from google.api_core.operations_v1.transports.base import (
- DEFAULT_CLIENT_INFO,
- OperationsTransport,
-)
-from google.api_core.operations_v1.abstract_operations_base_client import (
- AbstractOperationsBaseClient,
-)
-from google.auth import credentials as ga_credentials # type: ignore
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account # type: ignore
-import grpc
-
-OptionalRetry = Union[retries.Retry, object]
-
-
-class AbstractOperationsClient(AbstractOperationsBaseClient):
- """Manages long-running operations with an API service.
-
- When an API method normally takes long time to complete, it can be
- designed to return [Operation][google.api_core.operations_v1.Operation] to the
- client, and the client can use this interface to receive the real
- response asynchronously by polling the operation resource, or pass
- the operation resource to another API (such as Google Cloud Pub/Sub
- API) to receive the response. Any API service that returns
- long-running operations should implement the ``Operations``
- interface so developers can have a consistent client experience.
- """
-
- def __init__(
- self,
- *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, OperationsTransport, None] = None,
- client_options: Optional[client_options_lib.ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the operations client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Union[str, OperationsTransport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- super().__init__(
- credentials=credentials,
- transport=transport,
- client_options=client_options,
- client_info=client_info,
- )
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- def list_operations(
- self,
- name: str,
- filter_: Optional[str] = None,
- *,
- page_size: Optional[int] = None,
- page_token: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListOperationsPager:
- r"""Lists operations that match the specified filter in the request.
- If the server doesn't support this method, it returns
- ``UNIMPLEMENTED``.
-
- NOTE: the ``name`` binding allows API services to override the
- binding to use different resource name schemes, such as
- ``users/*/operations``. To override the binding, API services
- can add a binding such as ``"/v1/{name=users/*}/operations"`` to
- their service configuration. For backwards compatibility, the
- default name includes the operations collection id, however
- overriding users must ensure the name binding is the parent
- resource, without the operations collection id.
-
- Args:
- name (str):
- The name of the operation's parent
- resource.
- filter_ (str):
- The standard list filter.
- This corresponds to the ``filter`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operations_v1.pagers.ListOperationsPager:
- The response message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create a protobuf request object.
- request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
- if page_size is not None:
- request.page_size = page_size
- if page_token is not None:
- request.page_token = page_token
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListOperationsPager(
- method=rpc,
- request=request,
- response=response,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_operation(
- self,
- name: str,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.Operation:
- r"""Gets the latest state of a long-running operation.
- Clients can use this method to poll the operation result
- at intervals as recommended by the API service.
-
- Args:
- name (str):
- The name of the operation resource.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.longrunning.operations_pb2.Operation:
- This resource represents a long-
- running operation that is the result of a
- network API call.
-
- """
-
- request = operations_pb2.GetOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_operation(
- self,
- name: str,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> None:
- r"""Deletes a long-running operation. This method indicates that the
- client is no longer interested in the operation result. It does
- not cancel the operation. If the server doesn't support this
- method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
-
- Args:
- name (str):
- The name of the operation resource to
- be deleted.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- # Create the request object.
- request = operations_pb2.DeleteOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- def cancel_operation(
- self,
- name: Optional[str] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> None:
- r"""Starts asynchronous cancellation on a long-running operation.
- The server makes a best effort to cancel the operation, but
- success is not guaranteed. If the server doesn't support this
- method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
- can use
- [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
- or other methods to check whether the cancellation succeeded or
- whether the operation completed despite cancellation. On
- successful cancellation, the operation is not deleted; instead,
- it becomes an operation with an
- [Operation.error][google.api_core.operations_v1.Operation.error] value with
- a [google.rpc.Status.code][google.rpc.Status.code] of 1,
- corresponding to ``Code.CANCELLED``.
-
- Args:
- name (str):
- The name of the operation resource to
- be cancelled.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- # Create the request object.
- request = operations_pb2.CancelOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py
deleted file mode 100644
index a60c717..0000000
--- a/google/api_core/operations_v1/operations_async_client.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""An async client for the google.longrunning.operations meta-API.
-
-.. _Google API Style Guide:
- https://cloud.google.com/apis/design/design_pattern
- s#long_running_operations
-.. _google/longrunning/operations.proto:
- https://github.com/googleapis/googleapis/blob/master/google/longrunning
- /operations.proto
-"""
-
-import functools
-
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1, page_iterator_async
-from google.api_core import retry_async as retries
-from google.api_core import timeout as timeouts
-from google.longrunning import operations_pb2
-from grpc import Compression
-
-
-class OperationsAsyncClient:
- """Async client for interacting with long-running operations.
-
- Args:
- channel (aio.Channel): The gRPC AsyncIO channel associated with the
- service that implements the ``google.longrunning.operations``
- interface.
- client_config (dict):
- A dictionary of call options for each method. If not specified
- the default configuration is used.
- """
-
- def __init__(self, channel, client_config=None):
- # Create the gRPC client stub with gRPC AsyncIO channel.
- self.operations_stub = operations_pb2.OperationsStub(channel)
-
- default_retry = retries.AsyncRetry(
- initial=0.1, # seconds
- maximum=60.0, # seconds
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.ServiceUnavailable,
- ),
- timeout=600.0, # seconds
- )
- default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
-
- default_compression = Compression.NoCompression
-
- self._get_operation = gapic_v1.method_async.wrap_method(
- self.operations_stub.GetOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._list_operations = gapic_v1.method_async.wrap_method(
- self.operations_stub.ListOperations,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._cancel_operation = gapic_v1.method_async.wrap_method(
- self.operations_stub.CancelOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._delete_operation = gapic_v1.method_async.wrap_method(
- self.operations_stub.DeleteOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- async def get_operation(
- self,
- name,
- retry=gapic_v1.method_async.DEFAULT,
- timeout=gapic_v1.method_async.DEFAULT,
- compression=gapic_v1.method_async.DEFAULT,
- metadata=None,
- ):
- """Gets the latest state of a long-running operation.
-
- Clients can use this method to poll the operation result at intervals
- as recommended by the API service.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> response = await api.get_operation(name)
-
- Args:
- name (str): The name of the operation resource.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]):
- Additional gRPC metadata.
-
- Returns:
- google.longrunning.operations_pb2.Operation: The state of the
- operation.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- request = operations_pb2.GetOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- return await self._get_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- async def list_operations(
- self,
- name,
- filter_,
- retry=gapic_v1.method_async.DEFAULT,
- timeout=gapic_v1.method_async.DEFAULT,
- compression=gapic_v1.method_async.DEFAULT,
- metadata=None,
- ):
- """
- Lists operations that match the specified filter in the request.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>>
- >>> # Iterate over all results
- >>> for operation in await api.list_operations(name):
- >>> # process operation
- >>> pass
- >>>
- >>> # Or iterate over results one page at a time
- >>> iter = await api.list_operations(name)
- >>> for page in iter.pages:
- >>> for operation in page:
- >>> # process operation
- >>> pass
-
- Args:
- name (str): The name of the operation collection.
- filter_ (str): The standard list filter.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
-
- Returns:
- google.api_core.page_iterator.Iterator: An iterator that yields
- :class:`google.longrunning.operations_pb2.Operation` instances.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- # Create the request object.
- request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- # Create the method used to fetch pages
- method = functools.partial(
- self._list_operations,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- iterator = page_iterator_async.AsyncGRPCIterator(
- client=None,
- method=method,
- request=request,
- items_field="operations",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
-
- return iterator
-
- async def cancel_operation(
- self,
- name,
- retry=gapic_v1.method_async.DEFAULT,
- timeout=gapic_v1.method_async.DEFAULT,
- compression=gapic_v1.method_async.DEFAULT,
- metadata=None,
- ):
- """Starts asynchronous cancellation on a long-running operation.
-
- The server makes a best effort to cancel the operation, but success is
- not guaranteed. Clients can use :meth:`get_operation` or service-
- specific methods to check whether the cancellation succeeded or whether
- the operation completed despite cancellation. On successful
- cancellation, the operation is not deleted; instead, it becomes an
- operation with an ``Operation.error`` value with a
- ``google.rpc.Status.code`` of ``1``, corresponding to
- ``Code.CANCELLED``.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> api.cancel_operation(name)
-
- Args:
- name (str): The name of the operation resource to be cancelled.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
- """
- # Create the request object.
- request = operations_pb2.CancelOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- await self._cancel_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- async def delete_operation(
- self,
- name,
- retry=gapic_v1.method_async.DEFAULT,
- timeout=gapic_v1.method_async.DEFAULT,
- compression=gapic_v1.method_async.DEFAULT,
- metadata=None,
- ):
- """Deletes a long-running operation.
-
- This method indicates that the client is no longer interested in the
- operation result. It does not cancel the operation.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> api.delete_operation(name)
-
- Args:
- name (str): The name of the operation resource to be deleted.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- # Create the request object.
- request = operations_pb2.DeleteOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- await self._delete_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py
deleted file mode 100644
index d1d3fd5..0000000
--- a/google/api_core/operations_v1/operations_client.py
+++ /dev/null
@@ -1,378 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A client for the google.longrunning.operations meta-API.
-
-This is a client that deals with long-running operations that follow the
-pattern outlined by the `Google API Style Guide`_.
-
-When an API method normally takes long time to complete, it can be designed to
-return ``Operation`` to the client, and the client can use this interface to
-receive the real response asynchronously by polling the operation resource to
-receive the response.
-
-It is not a separate service, but rather an interface implemented by a larger
-service. The protocol-level definition is available at
-`google/longrunning/operations.proto`_. Typically, this will be constructed
-automatically by another client class to deal with operations.
-
-.. _Google API Style Guide:
- https://cloud.google.com/apis/design/design_pattern
- s#long_running_operations
-.. _google/longrunning/operations.proto:
- https://github.com/googleapis/googleapis/blob/master/google/longrunning
- /operations.proto
-"""
-
-import functools
-
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import page_iterator
-from google.api_core import retry as retries
-from google.api_core import timeout as timeouts
-from google.longrunning import operations_pb2
-from grpc import Compression
-
-
-class OperationsClient(object):
- """Client for interacting with long-running operations within a service.
-
- Args:
- channel (grpc.Channel): The gRPC channel associated with the service
- that implements the ``google.longrunning.operations`` interface.
- client_config (dict):
- A dictionary of call options for each method. If not specified
- the default configuration is used.
- """
-
- def __init__(self, channel, client_config=None):
- # Create the gRPC client stub.
- self.operations_stub = operations_pb2.OperationsStub(channel)
-
- default_retry = retries.Retry(
- initial=0.1, # seconds
- maximum=60.0, # seconds
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.ServiceUnavailable,
- ),
- timeout=600.0, # seconds
- )
- default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
-
- default_compression = Compression.NoCompression
-
- self._get_operation = gapic_v1.method.wrap_method(
- self.operations_stub.GetOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._list_operations = gapic_v1.method.wrap_method(
- self.operations_stub.ListOperations,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._cancel_operation = gapic_v1.method.wrap_method(
- self.operations_stub.CancelOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- self._delete_operation = gapic_v1.method.wrap_method(
- self.operations_stub.DeleteOperation,
- default_retry=default_retry,
- default_timeout=default_timeout,
- default_compression=default_compression,
- )
-
- # Service calls
- def get_operation(
- self,
- name,
- retry=gapic_v1.method.DEFAULT,
- timeout=gapic_v1.method.DEFAULT,
- compression=gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """Gets the latest state of a long-running operation.
-
- Clients can use this method to poll the operation result at intervals
- as recommended by the API service.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> response = api.get_operation(name)
-
- Args:
- name (str): The name of the operation resource.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]):
- Additional gRPC metadata.
-
- Returns:
- google.longrunning.operations_pb2.Operation: The state of the
- operation.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- request = operations_pb2.GetOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- return self._get_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- def list_operations(
- self,
- name,
- filter_,
- retry=gapic_v1.method.DEFAULT,
- timeout=gapic_v1.method.DEFAULT,
- compression=gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists operations that match the specified filter in the request.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>>
- >>> # Iterate over all results
- >>> for operation in api.list_operations(name):
- >>> # process operation
- >>> pass
- >>>
- >>> # Or iterate over results one page at a time
- >>> iter = api.list_operations(name)
- >>> for page in iter.pages:
- >>> for operation in page:
- >>> # process operation
- >>> pass
-
- Args:
- name (str): The name of the operation collection.
- filter_ (str): The standard list filter.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
-
- Returns:
- google.api_core.page_iterator.Iterator: An iterator that yields
- :class:`google.longrunning.operations_pb2.Operation` instances.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- # Create the request object.
- request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- # Create the method used to fetch pages
- method = functools.partial(
- self._list_operations,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- iterator = page_iterator.GRPCIterator(
- client=None,
- method=method,
- request=request,
- items_field="operations",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
-
- return iterator
-
- def cancel_operation(
- self,
- name,
- retry=gapic_v1.method.DEFAULT,
- timeout=gapic_v1.method.DEFAULT,
- compression=gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """Starts asynchronous cancellation on a long-running operation.
-
- The server makes a best effort to cancel the operation, but success is
- not guaranteed. Clients can use :meth:`get_operation` or service-
- specific methods to check whether the cancellation succeeded or whether
- the operation completed despite cancellation. On successful
- cancellation, the operation is not deleted; instead, it becomes an
- operation with an ``Operation.error`` value with a
- ``google.rpc.Status.code`` of ``1``, corresponding to
- ``Code.CANCELLED``.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> api.cancel_operation(name)
-
- Args:
- name (str): The name of the operation resource to be cancelled.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- # Create the request object.
- request = operations_pb2.CancelOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- self._cancel_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
-
- def delete_operation(
- self,
- name,
- retry=gapic_v1.method.DEFAULT,
- timeout=gapic_v1.method.DEFAULT,
- compression=gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """Deletes a long-running operation.
-
- This method indicates that the client is no longer interested in the
- operation result. It does not cancel the operation.
-
- Example:
- >>> from google.api_core import operations_v1
- >>> api = operations_v1.OperationsClient()
- >>> name = ''
- >>> api.delete_operation(name)
-
- Args:
- name (str): The name of the operation resource to be deleted.
- retry (google.api_core.retry.Retry): The retry strategy to use
- when invoking the RPC. If unspecified, the default retry from
- the client configuration will be used. If ``None``, then this
- method will not retry the RPC at all.
- timeout (float): The amount of time in seconds to wait for the RPC
- to complete. Note that if ``retry`` is used, this timeout
- applies to each individual attempt and the overall time it
- takes for this method to complete may be longer. If
- unspecified, the the default timeout in the client
- configuration is used. If ``None``, then the RPC method will
- not time out.
- compression (grpc.Compression): An element of grpc.compression
- e.g. grpc.compression.Gzip.
- metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
- metadata.
-
- Raises:
- google.api_core.exceptions.MethodNotImplemented: If the server
- does not support this method. Services are not required to
- implement this method.
- google.api_core.exceptions.GoogleAPICallError: If an error occurred
- while invoking the RPC, the appropriate ``GoogleAPICallError``
- subclass will be raised.
- """
- # Create the request object.
- request = operations_pb2.DeleteOperationRequest(name=name)
-
- # Add routing header
- metadata = metadata or []
- metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
-
- self._delete_operation(
- request,
- retry=retry,
- timeout=timeout,
- compression=compression,
- metadata=metadata,
- )
diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py
deleted file mode 100644
index 3ad3548..0000000
--- a/google/api_core/operations_v1/operations_client_config.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""gapic configuration for the google.longrunning.operations client."""
-
-# DEPRECATED: retry and timeout classes are instantiated directly
-config = {
- "interfaces": {
- "google.longrunning.Operations": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 600000,
- "total_timeout_millis": 600000,
- }
- },
- "methods": {
- "GetOperation": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "ListOperations": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "CancelOperation": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "DeleteOperation": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py
deleted file mode 100644
index 7ab0cd3..0000000
--- a/google/api_core/operations_v1/operations_rest_client_async.py
+++ /dev/null
@@ -1,345 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import Optional, Sequence, Tuple, Union
-
-from google.api_core import client_options as client_options_lib # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core.operations_v1 import pagers_async as pagers
-from google.api_core.operations_v1.transports.base import (
- DEFAULT_CLIENT_INFO,
- OperationsTransport,
-)
-from google.api_core.operations_v1.abstract_operations_base_client import (
- AbstractOperationsBaseClient,
-)
-from google.longrunning import operations_pb2
-
-try:
- from google.auth.aio import credentials as ga_credentials # type: ignore
-except ImportError as e: # pragma: NO COVER
- raise ImportError(
- "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
- "`pip install google-api-core[async_rest]`."
- ) from e
-
-
-class AsyncOperationsRestClient(AbstractOperationsBaseClient):
- """Manages long-running operations with a REST API service for the asynchronous client.
-
- When an API method normally takes long time to complete, it can be
- designed to return [Operation][google.api_core.operations_v1.Operation] to the
- client, and the client can use this interface to receive the real
- response asynchronously by polling the operation resource, or pass
- the operation resource to another API (such as Google Cloud Pub/Sub
- API) to receive the response. Any API service that returns
- long-running operations should implement the ``Operations``
- interface so developers can have a consistent client experience.
- """
-
- def __init__(
- self,
- *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, OperationsTransport, None] = None,
- client_options: Optional[client_options_lib.ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the operations client.
-
- Args:
- credentials (Optional[google.auth.aio.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Union[str, OperationsTransport]): The
- transport to use. If set to None, this defaults to 'rest_asyncio'.
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- super().__init__(
- credentials=credentials, # type: ignore
- # NOTE: If a transport is not provided, we force the client to use the async
- # REST transport.
- transport=transport or "rest_asyncio",
- client_options=client_options,
- client_info=client_info,
- )
-
- async def get_operation(
- self,
- name: str,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.Operation:
- r"""Gets the latest state of a long-running operation.
- Clients can use this method to poll the operation result
- at intervals as recommended by the API service.
-
- Args:
- name (str):
- The name of the operation resource.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.longrunning.operations_pb2.Operation:
- This resource represents a long-
- running operation that is the result of a
- network API call.
-
- """
-
- request = operations_pb2.GetOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_operations(
- self,
- name: str,
- filter_: Optional[str] = None,
- *,
- page_size: Optional[int] = None,
- page_token: Optional[str] = None,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListOperationsAsyncPager:
- r"""Lists operations that match the specified filter in the request.
- If the server doesn't support this method, it returns
- ``UNIMPLEMENTED``.
-
- NOTE: the ``name`` binding allows API services to override the
- binding to use different resource name schemes, such as
- ``users/*/operations``. To override the binding, API services
- can add a binding such as ``"/v1/{name=users/*}/operations"`` to
- their service configuration. For backwards compatibility, the
- default name includes the operations collection id, however
- overriding users must ensure the name binding is the parent
- resource, without the operations collection id.
-
- Args:
- name (str):
- The name of the operation's parent
- resource.
- filter_ (str):
- The standard list filter.
- This corresponds to the ``filter`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operations_v1.pagers.ListOperationsPager:
- The response message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create a protobuf request object.
- request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
- if page_size is not None:
- request.page_size = page_size
- if page_token is not None:
- request.page_token = page_token
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListOperationsAsyncPager(
- method=rpc,
- request=request,
- response=response,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def delete_operation(
- self,
- name: str,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> None:
- r"""Deletes a long-running operation. This method indicates that the
- client is no longer interested in the operation result. It does
- not cancel the operation. If the server doesn't support this
- method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
-
- Args:
- name (str):
- The name of the operation resource to
- be deleted.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- # Create the request object.
- request = operations_pb2.DeleteOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def cancel_operation(
- self,
- name: Optional[str] = None,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> None:
- r"""Starts asynchronous cancellation on a long-running operation.
- The server makes a best effort to cancel the operation, but
- success is not guaranteed. If the server doesn't support this
- method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
- can use
- [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
- or other methods to check whether the cancellation succeeded or
- whether the operation completed despite cancellation. On
- successful cancellation, the operation is not deleted; instead,
- it becomes an operation with an
- [Operation.error][google.api_core.operations_v1.Operation.error] value with
- a [google.rpc.Status.code][google.rpc.Status.code] of 1,
- corresponding to ``Code.CANCELLED``.
-
- Args:
- name (str):
- The name of the operation resource to
- be cancelled.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- # Create the request object.
- request = operations_pb2.CancelOperationRequest(name=name)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata or ()) + (
- gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
- )
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
deleted file mode 100644
index 76efd59..0000000
--- a/google/api_core/operations_v1/pagers.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import (
- Callable,
- Iterator,
- Sequence,
- Tuple,
-)
-
-from google.longrunning import operations_pb2
-from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
-
-
-class ListOperationsPager(ListOperationsPagerBase):
- """A pager for iterating through ``list_operations`` requests.
-
- This class thinly wraps an initial
- :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
- provides an ``__iter__`` method to iterate through its
- ``operations`` field.
-
- If there are more pages, the ``__iter__`` method will make additional
- ``ListOperations`` requests and continue to iterate
- through the ``operations`` field on the
- corresponding responses.
-
- All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
- attributes are available on the pager. If multiple requests are made, only
- the most recent response is retained, and thus used for attribute lookup.
- """
-
- def __init__(
- self,
- method: Callable[..., operations_pb2.ListOperationsResponse],
- request: operations_pb2.ListOperationsRequest,
- response: operations_pb2.ListOperationsResponse,
- *,
- metadata: Sequence[Tuple[str, str]] = (),
- ):
- super().__init__(
- method=method, request=request, response=response, metadata=metadata
- )
-
- @property
- def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
- yield self._response
- while self._response.next_page_token:
- self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
- yield self._response
-
- def __iter__(self) -> Iterator[operations_pb2.Operation]:
- for page in self.pages:
- yield from page.operations
diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py
deleted file mode 100644
index 4bb7f8c..0000000
--- a/google/api_core/operations_v1/pagers_async.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import (
- Callable,
- AsyncIterator,
- Sequence,
- Tuple,
-)
-
-from google.longrunning import operations_pb2
-from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
-
-
-class ListOperationsAsyncPager(ListOperationsPagerBase):
- """A pager for iterating through ``list_operations`` requests.
-
- This class thinly wraps an initial
- :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
- provides an ``__iter__`` method to iterate through its
- ``operations`` field.
-
- If there are more pages, the ``__iter__`` method will make additional
- ``ListOperations`` requests and continue to iterate
- through the ``operations`` field on the
- corresponding responses.
-
- All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
- attributes are available on the pager. If multiple requests are made, only
- the most recent response is retained, and thus used for attribute lookup.
- """
-
- def __init__(
- self,
- method: Callable[..., operations_pb2.ListOperationsResponse],
- request: operations_pb2.ListOperationsRequest,
- response: operations_pb2.ListOperationsResponse,
- *,
- metadata: Sequence[Tuple[str, str]] = (),
- ):
- super().__init__(
- method=method, request=request, response=response, metadata=metadata
- )
-
- @property
- async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]:
- yield self._response
- while self._response.next_page_token:
- self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
- yield self._response
-
- def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]:
- async def async_generator():
- async for page in self.pages:
- for operation in page.operations:
- yield operation
-
- return async_generator()
diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py
deleted file mode 100644
index 5ef8384..0000000
--- a/google/api_core/operations_v1/pagers_base.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import (
- Any,
- Callable,
- Sequence,
- Tuple,
-)
-
-from google.longrunning import operations_pb2
-
-
-class ListOperationsPagerBase:
- """A pager for iterating through ``list_operations`` requests.
-
- This class thinly wraps an initial
- :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
- provides an ``__iter__`` method to iterate through its
- ``operations`` field.
-
- If there are more pages, the ``__iter__`` method will make additional
- ``ListOperations`` requests and continue to iterate
- through the ``operations`` field on the
- corresponding responses.
-
- All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
- attributes are available on the pager. If multiple requests are made, only
- the most recent response is retained, and thus used for attribute lookup.
- """
-
- def __init__(
- self,
- method: Callable[..., operations_pb2.ListOperationsResponse],
- request: operations_pb2.ListOperationsRequest,
- response: operations_pb2.ListOperationsResponse,
- *,
- metadata: Sequence[Tuple[str, str]] = (),
- ):
- """Instantiate the pager.
-
- Args:
- method (Callable): The method that was originally called, and
- which instantiated this pager.
- request (google.longrunning.operations_pb2.ListOperationsRequest):
- The initial request object.
- response (google.longrunning.operations_pb2.ListOperationsResponse):
- The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- self._method = method
- self._request = request
- self._response = response
- self._metadata = metadata
-
- def __getattr__(self, name: str) -> Any:
- return getattr(self._response, name)
-
- def __repr__(self) -> str:
- return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py
deleted file mode 100644
index 8c24ce6..0000000
--- a/google/api_core/operations_v1/transports/__init__.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-from typing import cast, Dict, Tuple
-
-from .base import OperationsTransport
-from .rest import OperationsRestTransport
-
-# Compile a registry of transports.
-_transport_registry: Dict[str, OperationsTransport] = OrderedDict()
-_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport)
-
-__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport")
-
-try:
- from .rest_asyncio import AsyncOperationsRestTransport
-
- __all__ += ("AsyncOperationsRestTransport",)
- _transport_registry["rest_asyncio"] = cast(
- OperationsTransport, AsyncOperationsRestTransport
- )
-except ImportError:
- # This import requires the `async_rest` extra.
- # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
- # as other transports are still available.
- pass
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
deleted file mode 100644
index 2d78809..0000000
--- a/google/api_core/operations_v1/transports/base.py
+++ /dev/null
@@ -1,303 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import abc
-import re
-from typing import Awaitable, Callable, Optional, Sequence, Union
-import warnings
-
-import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account # type: ignore
-import google.protobuf
-from google.protobuf import empty_pb2, json_format # type: ignore
-from grpc import Compression
-
-import google.api_core # type: ignore
-from google.api_core import exceptions as core_exceptions # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core import general_helpers
-from google.api_core import retry as retries # type: ignore
-from google.api_core import version
-
-PROTOBUF_VERSION = google.protobuf.__version__
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=version.__version__,
-)
-
-
-class OperationsTransport(abc.ABC):
- """Abstract transport class for Operations."""
-
- AUTH_SCOPES = ()
-
- DEFAULT_HOST: str = "longrunning.googleapis.com"
-
- def __init__(
- self,
- *,
- host: str = DEFAULT_HOST,
- # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`.
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- url_scheme="https",
- **kwargs,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to.
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- credentials_file (Optional[str]): Deprecated. A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- scopes (Optional[Sequence[str]]): A list of scopes.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- # Save the hostname. Default to port 443 (HTTPS) if none is specified.
- if ":" not in host:
- host += ":443" # pragma: NO COVER
- self._host = host
-
- # Save the scopes.
- self._scopes = scopes
-
- # If no credentials are provided, then determine the appropriate
- # defaults.
- if credentials and credentials_file:
- raise core_exceptions.DuplicateCredentialArgs(
- "'credentials_file' and 'credentials' are mutually exclusive"
- )
-
- if credentials_file is not None:
- credentials, _ = google.auth.load_credentials_from_file(
- credentials_file,
- scopes=scopes,
- quota_project_id=quota_project_id,
- default_scopes=self.AUTH_SCOPES,
- )
-
- elif credentials is None:
- credentials, _ = google.auth.default(
- scopes=scopes,
- quota_project_id=quota_project_id,
- default_scopes=self.AUTH_SCOPES,
- )
-
- # If the credentials are service account credentials, then always try to use self signed JWT.
- if (
- always_use_jwt_access
- and isinstance(credentials, service_account.Credentials)
- and hasattr(service_account.Credentials, "with_always_use_jwt_access")
- ):
- credentials = credentials.with_always_use_jwt_access(True)
-
- # Save the credentials.
- self._credentials = credentials
-
- def _prep_wrapped_messages(self, client_info):
- # Precompute the wrapped methods.
- self._wrapped_methods = {
- self.list_operations: gapic_v1.method.wrap_method(
- self.list_operations,
- default_retry=retries.Retry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- default_compression=Compression.NoCompression,
- client_info=client_info,
- ),
- self.get_operation: gapic_v1.method.wrap_method(
- self.get_operation,
- default_retry=retries.Retry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- default_compression=Compression.NoCompression,
- client_info=client_info,
- ),
- self.delete_operation: gapic_v1.method.wrap_method(
- self.delete_operation,
- default_retry=retries.Retry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- default_compression=Compression.NoCompression,
- client_info=client_info,
- ),
- self.cancel_operation: gapic_v1.method.wrap_method(
- self.cancel_operation,
- default_retry=retries.Retry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- default_compression=Compression.NoCompression,
- client_info=client_info,
- ),
- }
-
- def close(self):
- """Closes resources associated with the transport.
-
- .. warning::
- Only call this method if the transport is NOT shared
- with other clients - this may cause errors in other clients!
- """
- raise NotImplementedError()
-
- def _convert_protobuf_message_to_dict(
- self, message: google.protobuf.message.Message
- ):
- r"""Converts protobuf message to a dictionary.
-
- When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
-
- Args:
- message(google.protobuf.message.Message): The protocol buffers message
- instance to serialize.
-
- Returns:
- A dict representation of the protocol buffer message.
- """
- # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility
- # with protobuf 3.x 4.x, Remove once support for protobuf 3.x and 4.x is dropped.
- if PROTOBUF_VERSION[0:2] in ["3.", "4."]:
- result = json_format.MessageToDict(
- message,
- preserving_proto_field_name=True,
- including_default_value_fields=True, # type: ignore # backward compatibility
- )
- else:
- result = json_format.MessageToDict(
- message,
- preserving_proto_field_name=True,
- always_print_fields_with_no_presence=True,
- )
-
- return result
-
- @property
- def list_operations(
- self,
- ) -> Callable[
- [operations_pb2.ListOperationsRequest],
- Union[
- operations_pb2.ListOperationsResponse,
- Awaitable[operations_pb2.ListOperationsResponse],
- ],
- ]:
- raise NotImplementedError()
-
- @property
- def get_operation(
- self,
- ) -> Callable[
- [operations_pb2.GetOperationRequest],
- Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
- ]:
- raise NotImplementedError()
-
- @property
- def delete_operation(
- self,
- ) -> Callable[
- [operations_pb2.DeleteOperationRequest],
- Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
- ]:
- raise NotImplementedError()
-
- @property
- def cancel_operation(
- self,
- ) -> Callable[
- [operations_pb2.CancelOperationRequest],
- Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
- ]:
- raise NotImplementedError()
-
-
-__all__ = ("OperationsTransport",)
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
deleted file mode 100644
index 62f34d6..0000000
--- a/google/api_core/operations_v1/transports/rest.py
+++ /dev/null
@@ -1,492 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-import warnings
-
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-import google.protobuf
-from google.protobuf import empty_pb2 # type: ignore
-from google.protobuf import json_format # type: ignore
-import grpc
-from requests import __version__ as requests_version
-
-from google.api_core import exceptions as core_exceptions # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core import general_helpers
-from google.api_core import path_template # type: ignore
-from google.api_core import rest_helpers # type: ignore
-from google.api_core import retry as retries # type: ignore
-
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-from .base import OperationsTransport
-
-PROTOBUF_VERSION = google.protobuf.__version__
-
-OptionalRetry = Union[retries.Retry, object]
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
- grpc_version=None,
- rest_version=f"requests@{requests_version}",
-)
-
-
-class OperationsRestTransport(OperationsTransport):
- """REST backend transport for Operations.
-
- Manages long-running operations with an API service.
-
- When an API method normally takes long time to complete, it can be
- designed to return [Operation][google.api_core.operations_v1.Operation] to the
- client, and the client can use this interface to receive the real
- response asynchronously by polling the operation resource, or pass
- the operation resource to another API (such as Google Cloud Pub/Sub
- API) to receive the response. Any API service that returns
- long-running operations should implement the ``Operations``
- interface so developers can have a consistent client experience.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends JSON representations of protocol buffers over HTTP/1.1
- """
-
- def __init__(
- self,
- *,
- host: str = "longrunning.googleapis.com",
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- url_scheme: str = "https",
- http_options: Optional[Dict] = None,
- path_prefix: str = "v1",
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to.
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
-
- credentials_file (Optional[str]): Deprecated. A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configuration from external sources`_.
-
- .. _Validate credential configuration from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- scopes (Optional(Sequence[str])): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
- certificate to configure mutual TLS HTTP channel. It is ignored
- if ``channel`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- http_options: a dictionary of http_options for transcoding, to override
- the defaults from operations.proto. Each method has an entry
- with the corresponding http rules as value.
- path_prefix: path prefix (usually represents API version). Set to
- "v1" by default.
-
- """
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- # Run the base constructor
- # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST
- )
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
- self._prep_wrapped_messages(client_info)
- self._http_options = http_options or {}
- self._path_prefix = path_prefix
-
- def _list_operations(
- self,
- request: operations_pb2.ListOperationsRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
- # to allow configuring retryable error codes.
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.ListOperationsResponse:
- r"""Call the list operations method over HTTP.
-
- Args:
- request (~.operations_pb2.ListOperationsRequest):
- The request object. The request message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
-
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- ~.operations_pb2.ListOperationsResponse:
- The response message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
-
- """
-
- http_options = [
- {
- "method": "get",
- "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.ListOperations" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.ListOperations"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.ListOperationsRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- api_response = operations_pb2.ListOperationsResponse()
- json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
- return api_response
-
- def _get_operation(
- self,
- request: operations_pb2.GetOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
- # to allow configuring retryable error codes.
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.Operation:
- r"""Call the get operation method over HTTP.
-
- Args:
- request (~.operations_pb2.GetOperationRequest):
- The request object. The request message for
- [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
-
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a long-
- running operation that is the result of a
- network API call.
-
- """
-
- http_options = [
- {
- "method": "get",
- "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.GetOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.GetOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.GetOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- api_response = operations_pb2.Operation()
- json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
- return api_response
-
- def _delete_operation(
- self,
- request: operations_pb2.DeleteOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
- # to allow configuring retryable error codes.
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> empty_pb2.Empty:
- r"""Call the delete operation method over HTTP.
-
- Args:
- request (~.operations_pb2.DeleteOperationRequest):
- The request object. The request message for
- [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
-
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
-
- http_options = [
- {
- "method": "delete",
- "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.DeleteOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.DeleteOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.DeleteOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- return empty_pb2.Empty()
-
- def _cancel_operation(
- self,
- request: operations_pb2.CancelOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
- # to allow configuring retryable error codes.
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Optional[float] = None,
- compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> empty_pb2.Empty:
- r"""Call the cancel operation method over HTTP.
-
- Args:
- request (~.operations_pb2.CancelOperationRequest):
- The request object. The request message for
- [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
-
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
-
- http_options = [
- {
- "method": "post",
- "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
- "body": "*",
- },
- ]
- if "google.longrunning.Operations.CancelOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.CancelOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- # Jsonify the request body
- body_request = operations_pb2.CancelOperationRequest()
- json_format.ParseDict(transcoded_request["body"], body_request)
- body = json_format.MessageToDict(
- body_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.CancelOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- data=body,
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- return empty_pb2.Empty()
-
- @property
- def list_operations(
- self,
- ) -> Callable[
- [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
- ]:
- return self._list_operations
-
- @property
- def get_operation(
- self,
- ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
- return self._get_operation
-
- @property
- def delete_operation(
- self,
- ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]:
- return self._delete_operation
-
- @property
- def cancel_operation(
- self,
- ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]:
- return self._cancel_operation
-
-
-__all__ = ("OperationsRestTransport",)
diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py
deleted file mode 100644
index 6fa9f56..0000000
--- a/google/api_core/operations_v1/transports/rest_asyncio.py
+++ /dev/null
@@ -1,581 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import json
-from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple
-import warnings
-
-from google.auth import __version__ as auth_version
-
-try:
- from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore
-except ImportError as e: # pragma: NO COVER
- raise ImportError(
- "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
- "`pip install google-api-core[async_rest]`."
- ) from e
-
-from google.api_core import exceptions as core_exceptions # type: ignore
-from google.api_core import gapic_v1 # type: ignore
-from google.api_core import general_helpers
-from google.api_core import path_template # type: ignore
-from google.api_core import rest_helpers # type: ignore
-from google.api_core import retry_async as retries_async # type: ignore
-from google.auth.aio import credentials as ga_credentials_async # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from google.protobuf import json_format # type: ignore
-
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
- grpc_version=None,
- rest_version=f"google-auth@{auth_version}",
-)
-
-
-class AsyncOperationsRestTransport(OperationsTransport):
- """Asynchronous REST backend transport for Operations.
-
- Manages async long-running operations with an API service.
-
- When an API method normally takes long time to complete, it can be
- designed to return [Operation][google.api_core.operations_v1.Operation] to the
- client, and the client can use this interface to receive the real
- response asynchronously by polling the operation resource, or pass
- the operation resource to another API (such as Google Cloud Pub/Sub
- API) to receive the response. Any API service that returns
- long-running operations should implement the ``Operations``
- interface so developers can have a consistent client experience.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends JSON representations of protocol buffers over HTTP/1.1
- """
-
- def __init__(
- self,
- *,
- host: str = "longrunning.googleapis.com",
- credentials: Optional[ga_credentials_async.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- url_scheme: str = "https",
- http_options: Optional[Dict] = None,
- path_prefix: str = "v1",
- # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport.
- # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport.
- # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport.
- # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport.
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to.
- credentials (Optional[google.auth.aio.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- credentials_file (Optional[str]): Deprecated. A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided. This argument will be
- removed in the next major version of `google-api-core`.
-
- .. warning::
- Important: If you accept a credential configuration (credential JSON/File/Stream)
- from an external source for authentication to Google Cloud Platform, you must
- validate it before providing it to any Google API or client library. Providing an
- unvalidated credential configuration to Google APIs or libraries can compromise
- the security of your systems and data. For more information, refer to
- `Validate credential configurations from external sources`_.
-
- .. _Validate credential configurations from external sources:
-
- https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- http_options: a dictionary of http_options for transcoding, to override
- the defaults from operations.proto. Each method has an entry
- with the corresponding http rules as value.
- path_prefix: path prefix (usually represents API version). Set to
- "v1" by default.
-
- """
- if credentials_file is not None:
- warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
-
- unsupported_params = {
- # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport.
- "google.api_core.client_options.ClientOptions.credentials_file": credentials_file,
- # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport.
- "google.api_core.client_options.ClientOptions.scopes": scopes,
- # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport.
- "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id,
- # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
- "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
- # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
- "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
- }
- provided_unsupported_params = [
- name for name, value in unsupported_params.items() if value is not None
- ]
- if provided_unsupported_params:
- raise core_exceptions.AsyncRestUnsupportedParameterError(
- f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}"
- )
-
- super().__init__(
- host=host,
- # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
- credentials=credentials, # type: ignore
- client_info=client_info,
- # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported.
- always_use_jwt_access=False,
- )
- # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for
- # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous
- # code.
- # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
- self._session = AsyncAuthorizedSession(self._credentials) # type: ignore
- # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
- self._prep_wrapped_messages(client_info)
- self._http_options = http_options or {}
- self._path_prefix = path_prefix
-
- def _prep_wrapped_messages(self, client_info):
- # Precompute the wrapped methods.
- self._wrapped_methods = {
- self.list_operations: gapic_v1.method_async.wrap_method(
- self.list_operations,
- default_retry=retries_async.AsyncRetry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries_async.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- client_info=client_info,
- kind="rest_asyncio",
- ),
- self.get_operation: gapic_v1.method_async.wrap_method(
- self.get_operation,
- default_retry=retries_async.AsyncRetry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries_async.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- client_info=client_info,
- kind="rest_asyncio",
- ),
- self.delete_operation: gapic_v1.method_async.wrap_method(
- self.delete_operation,
- default_retry=retries_async.AsyncRetry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries_async.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- client_info=client_info,
- kind="rest_asyncio",
- ),
- self.cancel_operation: gapic_v1.method_async.wrap_method(
- self.cancel_operation,
- default_retry=retries_async.AsyncRetry(
- initial=0.5,
- maximum=10.0,
- multiplier=2.0,
- predicate=retries_async.if_exception_type(
- core_exceptions.ServiceUnavailable,
- ),
- deadline=10.0,
- ),
- default_timeout=10.0,
- client_info=client_info,
- kind="rest_asyncio",
- ),
- }
-
- async def _list_operations(
- self,
- request: operations_pb2.ListOperationsRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.ListOperationsResponse:
- r"""Asynchronously call the list operations method over HTTP.
-
- Args:
- request (~.operations_pb2.ListOperationsRequest):
- The request object. The request message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- ~.operations_pb2.ListOperationsResponse:
- The response message for
- [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
-
- """
-
- http_options = [
- {
- "method": "get",
- "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.ListOperations" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.ListOperations"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.ListOperationsRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = await getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
- content = await response.read()
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- payload = json.loads(content.decode("utf-8"))
- request_url = "{host}{uri}".format(host=self._host, uri=uri)
- raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
-
- # Return the response
- api_response = operations_pb2.ListOperationsResponse()
- json_format.Parse(content, api_response, ignore_unknown_fields=False)
- return api_response
-
- async def _get_operation(
- self,
- request: operations_pb2.GetOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.Operation:
- r"""Asynchronously call the get operation method over HTTP.
-
- Args:
- request (~.operations_pb2.GetOperationRequest):
- The request object. The request message for
- [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a long-
- running operation that is the result of a
- network API call.
-
- """
-
- http_options = [
- {
- "method": "get",
- "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.GetOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.GetOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.GetOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = await getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
- content = await response.read()
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- payload = json.loads(content.decode("utf-8"))
- request_url = "{host}{uri}".format(host=self._host, uri=uri)
- raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
-
- # Return the response
- api_response = operations_pb2.Operation()
- json_format.Parse(content, api_response, ignore_unknown_fields=False)
- return api_response
-
- async def _delete_operation(
- self,
- request: operations_pb2.DeleteOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> empty_pb2.Empty:
- r"""Asynchronously call the delete operation method over HTTP.
-
- Args:
- request (~.operations_pb2.DeleteOperationRequest):
- The request object. The request message for
- [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
-
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
-
- http_options = [
- {
- "method": "delete",
- "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
- },
- ]
- if "google.longrunning.Operations.DeleteOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.DeleteOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.DeleteOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = await getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- content = await response.read()
- payload = json.loads(content.decode("utf-8"))
- request_url = "{host}{uri}".format(host=self._host, uri=uri)
- raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
-
- return empty_pb2.Empty()
-
- async def _cancel_operation(
- self,
- request: operations_pb2.CancelOperationRequest,
- *,
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
- # to allow configuring retryable error codes.
- retry=gapic_v1.method_async.DEFAULT,
- timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
- # TODO(https://github.com/googleapis/python-api-core/issues/722): Add `retry` parameter
- # to allow configuring retryable error codes.
- ) -> empty_pb2.Empty:
- r"""Asynchronously call the cancel operation method over HTTP.
-
- Args:
- request (~.operations_pb2.CancelOperationRequest):
- The request object. The request message for
- [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
-
- http_options = [
- {
- "method": "post",
- "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
- "body": "*",
- },
- ]
- if "google.longrunning.Operations.CancelOperation" in self._http_options:
- http_options = self._http_options[
- "google.longrunning.Operations.CancelOperation"
- ]
-
- request_kwargs = self._convert_protobuf_message_to_dict(request)
- transcoded_request = path_template.transcode(http_options, **request_kwargs)
-
- # Jsonify the request body
- body_request = operations_pb2.CancelOperationRequest()
- json_format.ParseDict(transcoded_request["body"], body_request)
- body = json_format.MessageToDict(
- body_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
-
- # Jsonify the query params
- query_params_request = operations_pb2.CancelOperationRequest()
- json_format.ParseDict(transcoded_request["query_params"], query_params_request)
- query_params = json_format.MessageToDict(
- query_params_request,
- preserving_proto_field_name=False,
- use_integers_for_enums=False,
- )
-
- # Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
- response = await getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params),
- data=body,
- )
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- content = await response.read()
- payload = json.loads(content.decode("utf-8"))
- request_url = "{host}{uri}".format(host=self._host, uri=uri)
- raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
-
- return empty_pb2.Empty()
-
- @property
- def list_operations(
- self,
- ) -> Callable[
- [operations_pb2.ListOperationsRequest],
- Coroutine[Any, Any, operations_pb2.ListOperationsResponse],
- ]:
- return self._list_operations
-
- @property
- def get_operation(
- self,
- ) -> Callable[
- [operations_pb2.GetOperationRequest],
- Coroutine[Any, Any, operations_pb2.Operation],
- ]:
- return self._get_operation
-
- @property
- def delete_operation(
- self,
- ) -> Callable[
- [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
- ]:
- return self._delete_operation
-
- @property
- def cancel_operation(
- self,
- ) -> Callable[
- [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
- ]:
- return self._cancel_operation
-
-
-__all__ = ("AsyncOperationsRestTransport",)
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
deleted file mode 100644
index 23761ec..0000000
--- a/google/api_core/page_iterator.py
+++ /dev/null
@@ -1,571 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Iterators for paging through paged API methods.
-
-These iterators simplify the process of paging through API responses
-where the request takes a page token and the response is a list of results with
-a token for the next page. See `list pagination`_ in the Google API Style Guide
-for more details.
-
-.. _list pagination:
- https://cloud.google.com/apis/design/design_patterns#list_pagination
-
-API clients that have methods that follow the list pagination pattern can
-return an :class:`.Iterator`. You can use this iterator to get **all** of
-the results across all pages::
-
- >>> results_iterator = client.list_resources()
- >>> list(results_iterator) # Convert to a list (consumes all values).
-
-Or you can walk your way through items and call off the search early if
-you find what you're looking for (resulting in possibly fewer requests)::
-
- >>> for resource in results_iterator:
- ... print(resource.name)
- ... if not resource.is_valid:
- ... break
-
-At any point, you may check the number of items consumed by referencing the
-``num_results`` property of the iterator::
-
- >>> for my_item in results_iterator:
- ... if results_iterator.num_results >= 10:
- ... break
-
-When iterating, not every new item will send a request to the server.
-To iterate based on each page of items (where a page corresponds to
-a request)::
-
- >>> for page in results_iterator.pages:
- ... print('=' * 20)
- ... print(' Page number: {:d}'.format(iterator.page_number))
- ... print(' Items in page: {:d}'.format(page.num_items))
- ... print(' First item: {!r}'.format(next(page)))
- ... print('Items remaining: {:d}'.format(page.remaining))
- ... print('Next page token: {}'.format(iterator.next_page_token))
- ====================
- Page number: 1
- Items in page: 1
- First item: <MyItemClass at 0x7f1d3cccf690>
- Items remaining: 0
- Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
- ====================
- Page number: 2
- Items in page: 19
- First item: <MyItemClass at 0x7f1d3cccffd0>
- Items remaining: 18
- Next page token: None
-
-Then, for each page you can get all the resources on that page by iterating
-through it or using :func:`list`::
-
- >>> list(page)
- [
- <MyItemClass at 0x7fd64a098ad0>,
- <MyItemClass at 0x7fd64a098ed0>,
- <MyItemClass at 0x7fd64a098e90>,
- ]
-"""
-
-import abc
-
-
-class Page(object):
- """Single page of results in an iterator.
-
- Args:
- parent (google.api_core.page_iterator.Iterator): The iterator that owns
- the current page.
- items (Sequence[Any]): An iterable (that also defines __len__) of items
- from a raw API response.
- item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
- Callable to convert an item from the type in the raw API response
- into the native object. Will be called with the iterator and a
- single item.
- raw_page Optional[google.protobuf.message.Message]:
- The raw page response.
- """
-
- def __init__(self, parent, items, item_to_value, raw_page=None):
- self._parent = parent
- self._num_items = len(items)
- self._remaining = self._num_items
- self._item_iter = iter(items)
- self._item_to_value = item_to_value
- self._raw_page = raw_page
-
- @property
- def raw_page(self):
- """google.protobuf.message.Message"""
- return self._raw_page
-
- @property
- def num_items(self):
- """int: Total items in the page."""
- return self._num_items
-
- @property
- def remaining(self):
- """int: Remaining items in the page."""
- return self._remaining
-
- def __iter__(self):
- """The :class:`Page` is an iterator of items."""
- return self
-
- def __next__(self):
- """Get the next value in the page."""
- item = next(self._item_iter)
- result = self._item_to_value(self._parent, item)
- # Since we've successfully got the next value from the
- # iterator, we update the number of remaining.
- self._remaining -= 1
- return result
-
-
-def _item_to_value_identity(iterator, item):
- """An item to value transformer that returns the item un-changed."""
- # pylint: disable=unused-argument
- # We are conforming to the interface defined by Iterator.
- return item
-
-
-class Iterator(object, metaclass=abc.ABCMeta):
- """A generic class for iterating through API list responses.
-
- Args:
- client(google.cloud.client.Client): The API client.
- item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
- Callable to convert an item from the type in the raw API response
- into the native object. Will be called with the iterator and a
- single item.
- page_token (str): A token identifying a page in a result set to start
- fetching results from.
- max_results (int): The maximum number of results to fetch.
- """
-
- def __init__(
- self,
- client,
- item_to_value=_item_to_value_identity,
- page_token=None,
- max_results=None,
- ):
- self._started = False
- self.__active_iterator = None
-
- self.client = client
- """Optional[Any]: The client that created this iterator."""
- self.item_to_value = item_to_value
- """Callable[Iterator, Any]: Callable to convert an item from the type
- in the raw API response into the native object. Will be called with
- the iterator and a
- single item.
- """
- self.max_results = max_results
- """int: The maximum number of results to fetch"""
-
- # The attributes below will change over the life of the iterator.
- self.page_number = 0
- """int: The current page of results."""
- self.next_page_token = page_token
- """str: The token for the next page of results. If this is set before
- the iterator starts, it effectively offsets the iterator to a
- specific starting point."""
- self.num_results = 0
- """int: The total number of results fetched so far."""
-
- @property
- def pages(self):
- """Iterator of pages in the response.
-
- returns:
- types.GeneratorType[google.api_core.page_iterator.Page]: A
- generator of page instances.
-
- raises:
- ValueError: If the iterator has already been started.
- """
- if self._started:
- raise ValueError("Iterator has already started", self)
- self._started = True
- return self._page_iter(increment=True)
-
- def _items_iter(self):
- """Iterator for each item returned."""
- for page in self._page_iter(increment=False):
- for item in page:
- self.num_results += 1
- yield item
-
- def __iter__(self):
- """Iterator for each item returned.
-
- Returns:
- types.GeneratorType[Any]: A generator of items from the API.
-
- Raises:
- ValueError: If the iterator has already been started.
- """
- if self._started:
- raise ValueError("Iterator has already started", self)
- self._started = True
- return self._items_iter()
-
- def __next__(self):
- if self.__active_iterator is None:
- self.__active_iterator = iter(self)
- return next(self.__active_iterator)
-
- def _page_iter(self, increment):
- """Generator of pages of API responses.
-
- Args:
- increment (bool): Flag indicating if the total number of results
- should be incremented on each page. This is useful since a page
- iterator will want to increment by results per page while an
- items iterator will want to increment per item.
-
- Yields:
- Page: each page of items from the API.
- """
- page = self._next_page()
- while page is not None:
- self.page_number += 1
- if increment:
- self.num_results += page.num_items
- yield page
- page = self._next_page()
-
- @abc.abstractmethod
- def _next_page(self):
- """Get the next page in the iterator.
-
- This does nothing and is intended to be over-ridden by subclasses
- to return the next :class:`Page`.
-
- Raises:
- NotImplementedError: Always, this method is abstract.
- """
- raise NotImplementedError
-
-
-def _do_nothing_page_start(iterator, page, response):
- """Helper to provide custom behavior after a :class:`Page` is started.
-
- This is a do-nothing stand-in as the default value.
-
- Args:
- iterator (Iterator): An iterator that holds some request info.
- page (Page): The page that was just created.
- response (Any): The API response for a page.
- """
- # pylint: disable=unused-argument
- pass
-
-
-class HTTPIterator(Iterator):
- """A generic class for iterating through HTTP/JSON API list responses.
-
- To make an iterator work, you'll need to provide a way to convert a JSON
- item returned from the API into the object of your choice (via
- ``item_to_value``). You also may need to specify a custom ``items_key`` so
- that a given response (containing a page of results) can be parsed into an
- iterable page of the actual objects you want.
-
- Args:
- client (google.cloud.client.Client): The API client.
- api_request (Callable): The function to use to make API requests.
- Generally, this will be
- :meth:`google.cloud._http.JSONConnection.api_request`.
- path (str): The method path to query for the list of items.
- item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
- Callable to convert an item from the type in the JSON response into
- a native object. Will be called with the iterator and a single
- item.
- items_key (str): The key in the API response where the list of items
- can be found.
- page_token (str): A token identifying a page in a result set to start
- fetching results from.
- page_size (int): The maximum number of results to fetch per page
- max_results (int): The maximum number of results to fetch
- extra_params (dict): Extra query string parameters for the
- API call.
- page_start (Callable[
- google.api_core.page_iterator.Iterator,
- google.api_core.page_iterator.Page, dict]): Callable to provide
- any special behavior after a new page has been created. Assumed
- signature takes the :class:`.Iterator` that started the page,
- the :class:`.Page` that was started and the dictionary containing
- the page response.
- next_token (str): The name of the field used in the response for page
- tokens.
-
- .. autoattribute:: pages
- """
-
- _DEFAULT_ITEMS_KEY = "items"
- _PAGE_TOKEN = "pageToken"
- _MAX_RESULTS = "maxResults"
- _NEXT_TOKEN = "nextPageToken"
- _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
- _HTTP_METHOD = "GET"
-
- def __init__(
- self,
- client,
- api_request,
- path,
- item_to_value,
- items_key=_DEFAULT_ITEMS_KEY,
- page_token=None,
- page_size=None,
- max_results=None,
- extra_params=None,
- page_start=_do_nothing_page_start,
- next_token=_NEXT_TOKEN,
- ):
- super(HTTPIterator, self).__init__(
- client, item_to_value, page_token=page_token, max_results=max_results
- )
- self.api_request = api_request
- self.path = path
- self._items_key = items_key
- self.extra_params = extra_params
- self._page_size = page_size
- self._page_start = page_start
- self._next_token = next_token
- # Verify inputs / provide defaults.
- if self.extra_params is None:
- self.extra_params = {}
- self._verify_params()
-
- def _verify_params(self):
- """Verifies the parameters don't use any reserved parameter.
-
- Raises:
- ValueError: If a reserved parameter is used.
- """
- reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
- if reserved_in_use:
- raise ValueError("Using a reserved parameter", reserved_in_use)
-
- def _next_page(self):
- """Get the next page in the iterator.
-
- Returns:
- Optional[Page]: The next page in the iterator or :data:`None` if
- there are no pages left.
- """
- if self._has_next_page():
- response = self._get_next_page_response()
- items = response.get(self._items_key, ())
- page = Page(self, items, self.item_to_value, raw_page=response)
- self._page_start(self, page, response)
- self.next_page_token = response.get(self._next_token)
- return page
- else:
- return None
-
- def _has_next_page(self):
- """Determines whether or not there are more pages with results.
-
- Returns:
- bool: Whether the iterator has more pages.
- """
- if self.page_number == 0:
- return True
-
- if self.max_results is not None:
- if self.num_results >= self.max_results:
- return False
-
- return self.next_page_token is not None
-
- def _get_query_params(self):
- """Getter for query parameters for the next request.
-
- Returns:
- dict: A dictionary of query parameters.
- """
- result = {}
- if self.next_page_token is not None:
- result[self._PAGE_TOKEN] = self.next_page_token
-
- page_size = None
- if self.max_results is not None:
- page_size = self.max_results - self.num_results
- if self._page_size is not None:
- page_size = min(page_size, self._page_size)
- elif self._page_size is not None:
- page_size = self._page_size
-
- if page_size is not None:
- result[self._MAX_RESULTS] = page_size
-
- result.update(self.extra_params)
- return result
-
- def _get_next_page_response(self):
- """Requests the next page from the path provided.
-
- Returns:
- dict: The parsed JSON response of the next page's contents.
-
- Raises:
- ValueError: If the HTTP method is not ``GET`` or ``POST``.
- """
- params = self._get_query_params()
- if self._HTTP_METHOD == "GET":
- return self.api_request(
- method=self._HTTP_METHOD, path=self.path, query_params=params
- )
- elif self._HTTP_METHOD == "POST":
- return self.api_request(
- method=self._HTTP_METHOD, path=self.path, data=params
- )
- else:
- raise ValueError("Unexpected HTTP method", self._HTTP_METHOD)
-
-
-class _GAXIterator(Iterator):
- """A generic class for iterating through Cloud gRPC APIs list responses.
-
- Any:
- client (google.cloud.client.Client): The API client.
- page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
- to conform to the :class:`Iterator` interface.
- item_to_value (Callable[Iterator, Any]): Callable to convert an item
- from the protobuf response into a native object. Will
- be called with the iterator and a single item.
- max_results (int): The maximum number of results to fetch.
-
- .. autoattribute:: pages
- """
-
- def __init__(self, client, page_iter, item_to_value, max_results=None):
- super(_GAXIterator, self).__init__(
- client,
- item_to_value,
- page_token=page_iter.page_token,
- max_results=max_results,
- )
- self._gax_page_iter = page_iter
-
- def _next_page(self):
- """Get the next page in the iterator.
-
- Wraps the response from the :class:`~google.gax.PageIterator` in a
- :class:`Page` instance and captures some state at each page.
-
- Returns:
- Optional[Page]: The next page in the iterator or :data:`None` if
- there are no pages left.
- """
- try:
- items = next(self._gax_page_iter)
- page = Page(self, items, self.item_to_value)
- self.next_page_token = self._gax_page_iter.page_token or None
- return page
- except StopIteration:
- return None
-
-
-class GRPCIterator(Iterator):
- """A generic class for iterating through gRPC list responses.
-
- .. note:: The class does not take a ``page_token`` argument because it can
- just be specified in the ``request``.
-
- Args:
- client (google.cloud.client.Client): The API client. This unused by
- this class, but kept to satisfy the :class:`Iterator` interface.
- method (Callable[protobuf.Message]): A bound gRPC method that should
- take a single message for the request.
- request (protobuf.Message): The request message.
- items_field (str): The field in the response message that has the
- items for the page.
- item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
- item from the type in the JSON response into a native object. Will
- be called with the iterator and a single item.
- request_token_field (str): The field in the request message used to
- specify the page token.
- response_token_field (str): The field in the response message that has
- the token for the next page.
- max_results (int): The maximum number of results to fetch.
-
- .. autoattribute:: pages
- """
-
- _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
- _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
-
- def __init__(
- self,
- client,
- method,
- request,
- items_field,
- item_to_value=_item_to_value_identity,
- request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
- response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
- max_results=None,
- ):
- super(GRPCIterator, self).__init__(
- client, item_to_value, max_results=max_results
- )
- self._method = method
- self._request = request
- self._items_field = items_field
- self._request_token_field = request_token_field
- self._response_token_field = response_token_field
-
- def _next_page(self):
- """Get the next page in the iterator.
-
- Returns:
- Page: The next page in the iterator or :data:`None` if
- there are no pages left.
- """
- if not self._has_next_page():
- return None
-
- if self.next_page_token is not None:
- setattr(self._request, self._request_token_field, self.next_page_token)
-
- response = self._method(self._request)
-
- self.next_page_token = getattr(response, self._response_token_field)
- items = getattr(response, self._items_field)
- page = Page(self, items, self.item_to_value, raw_page=response)
-
- return page
-
- def _has_next_page(self):
- """Determines whether or not there are more pages with results.
-
- Returns:
- bool: Whether the iterator has more pages.
- """
- if self.page_number == 0:
- return True
-
- if self.max_results is not None:
- if self.num_results >= self.max_results:
- return False
-
- # Note: intentionally a falsy check instead of a None check. The RPC
- # can return an empty string indicating no more pages.
- return True if self.next_page_token else False
diff --git a/google/api_core/page_iterator_async.py b/google/api_core/page_iterator_async.py
deleted file mode 100644
index c072575..0000000
--- a/google/api_core/page_iterator_async.py
+++ /dev/null
@@ -1,285 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""AsyncIO iterators for paging through paged API methods.
-
-These iterators simplify the process of paging through API responses
-where the request takes a page token and the response is a list of results with
-a token for the next page. See `list pagination`_ in the Google API Style Guide
-for more details.
-
-.. _list pagination:
- https://cloud.google.com/apis/design/design_patterns#list_pagination
-
-API clients that have methods that follow the list pagination pattern can
-return an :class:`.AsyncIterator`:
-
- >>> results_iterator = await client.list_resources()
-
-Or you can walk your way through items and call off the search early if
-you find what you're looking for (resulting in possibly fewer requests)::
-
- >>> async for resource in results_iterator:
- ... print(resource.name)
- ... if not resource.is_valid:
- ... break
-
-At any point, you may check the number of items consumed by referencing the
-``num_results`` property of the iterator::
-
- >>> async for my_item in results_iterator:
- ... if results_iterator.num_results >= 10:
- ... break
-
-When iterating, not every new item will send a request to the server.
-To iterate based on each page of items (where a page corresponds to
-a request)::
-
- >>> async for page in results_iterator.pages:
- ... print('=' * 20)
- ... print(' Page number: {:d}'.format(iterator.page_number))
- ... print(' Items in page: {:d}'.format(page.num_items))
- ... print(' First item: {!r}'.format(next(page)))
- ... print('Items remaining: {:d}'.format(page.remaining))
- ... print('Next page token: {}'.format(iterator.next_page_token))
- ====================
- Page number: 1
- Items in page: 1
- First item: <MyItemClass at 0x7f1d3cccf690>
- Items remaining: 0
- Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
- ====================
- Page number: 2
- Items in page: 19
- First item: <MyItemClass at 0x7f1d3cccffd0>
- Items remaining: 18
- Next page token: None
-"""
-
-import abc
-
-from google.api_core.page_iterator import Page
-
-
-def _item_to_value_identity(iterator, item):
- """An item to value transformer that returns the item un-changed."""
- # pylint: disable=unused-argument
- # We are conforming to the interface defined by Iterator.
- return item
-
-
-class AsyncIterator(abc.ABC):
- """A generic class for iterating through API list responses.
-
- Args:
- client(google.cloud.client.Client): The API client.
- item_to_value (Callable[google.api_core.page_iterator_async.AsyncIterator, Any]):
- Callable to convert an item from the type in the raw API response
- into the native object. Will be called with the iterator and a
- single item.
- page_token (str): A token identifying a page in a result set to start
- fetching results from.
- max_results (int): The maximum number of results to fetch.
- """
-
- def __init__(
- self,
- client,
- item_to_value=_item_to_value_identity,
- page_token=None,
- max_results=None,
- ):
- self._started = False
- self.__active_aiterator = None
-
- self.client = client
- """Optional[Any]: The client that created this iterator."""
- self.item_to_value = item_to_value
- """Callable[Iterator, Any]: Callable to convert an item from the type
- in the raw API response into the native object. Will be called with
- the iterator and a
- single item.
- """
- self.max_results = max_results
- """int: The maximum number of results to fetch."""
-
- # The attributes below will change over the life of the iterator.
- self.page_number = 0
- """int: The current page of results."""
- self.next_page_token = page_token
- """str: The token for the next page of results. If this is set before
- the iterator starts, it effectively offsets the iterator to a
- specific starting point."""
- self.num_results = 0
- """int: The total number of results fetched so far."""
-
- @property
- def pages(self):
- """Iterator of pages in the response.
-
- returns:
- types.GeneratorType[google.api_core.page_iterator.Page]: A
- generator of page instances.
-
- raises:
- ValueError: If the iterator has already been started.
- """
- if self._started:
- raise ValueError("Iterator has already started", self)
- self._started = True
- return self._page_aiter(increment=True)
-
- async def _items_aiter(self):
- """Iterator for each item returned."""
- async for page in self._page_aiter(increment=False):
- for item in page:
- self.num_results += 1
- yield item
-
- def __aiter__(self):
- """Iterator for each item returned.
-
- Returns:
- types.GeneratorType[Any]: A generator of items from the API.
-
- Raises:
- ValueError: If the iterator has already been started.
- """
- if self._started:
- raise ValueError("Iterator has already started", self)
- self._started = True
- return self._items_aiter()
-
- async def __anext__(self):
- if self.__active_aiterator is None:
- self.__active_aiterator = self.__aiter__()
- return await self.__active_aiterator.__anext__()
-
- async def _page_aiter(self, increment):
- """Generator of pages of API responses.
-
- Args:
- increment (bool): Flag indicating if the total number of results
- should be incremented on each page. This is useful since a page
- iterator will want to increment by results per page while an
- items iterator will want to increment per item.
-
- Yields:
- Page: each page of items from the API.
- """
- page = await self._next_page()
- while page is not None:
- self.page_number += 1
- if increment:
- self.num_results += page.num_items
- yield page
- page = await self._next_page()
-
- @abc.abstractmethod
- async def _next_page(self):
- """Get the next page in the iterator.
-
- This does nothing and is intended to be over-ridden by subclasses
- to return the next :class:`Page`.
-
- Raises:
- NotImplementedError: Always, this method is abstract.
- """
- raise NotImplementedError
-
-
-class AsyncGRPCIterator(AsyncIterator):
- """A generic class for iterating through gRPC list responses.
-
- .. note:: The class does not take a ``page_token`` argument because it can
- just be specified in the ``request``.
-
- Args:
- client (google.cloud.client.Client): The API client. This unused by
- this class, but kept to satisfy the :class:`Iterator` interface.
- method (Callable[protobuf.Message]): A bound gRPC method that should
- take a single message for the request.
- request (protobuf.Message): The request message.
- items_field (str): The field in the response message that has the
- items for the page.
- item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
- item from the type in the JSON response into a native object. Will
- be called with the iterator and a single item.
- request_token_field (str): The field in the request message used to
- specify the page token.
- response_token_field (str): The field in the response message that has
- the token for the next page.
- max_results (int): The maximum number of results to fetch.
-
- .. autoattribute:: pages
- """
-
- _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
- _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
-
- def __init__(
- self,
- client,
- method,
- request,
- items_field,
- item_to_value=_item_to_value_identity,
- request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
- response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
- max_results=None,
- ):
- super().__init__(client, item_to_value, max_results=max_results)
- self._method = method
- self._request = request
- self._items_field = items_field
- self._request_token_field = request_token_field
- self._response_token_field = response_token_field
-
- async def _next_page(self):
- """Get the next page in the iterator.
-
- Returns:
- Page: The next page in the iterator or :data:`None` if
- there are no pages left.
- """
- if not self._has_next_page():
- return None
-
- if self.next_page_token is not None:
- setattr(self._request, self._request_token_field, self.next_page_token)
-
- response = await self._method(self._request)
-
- self.next_page_token = getattr(response, self._response_token_field)
- items = getattr(response, self._items_field)
- page = Page(self, items, self.item_to_value, raw_page=response)
-
- return page
-
- def _has_next_page(self):
- """Determines whether or not there are more pages with results.
-
- Returns:
- bool: Whether the iterator has more pages.
- """
- if self.page_number == 0:
- return True
-
- # Note: intentionally a falsy check instead of a None check. The RPC
- # can return an empty string indicating no more pages.
- if self.max_results is not None:
- if self.num_results >= self.max_results:
- return False
-
- return True if self.next_page_token else False
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
deleted file mode 100644
index b8ebb2a..0000000
--- a/google/api_core/path_template.py
+++ /dev/null
@@ -1,346 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Expand and validate URL path templates.
-
-This module provides the :func:`expand` and :func:`validate` functions for
-interacting with Google-style URL `path templates`_ which are commonly used
-in Google APIs for `resource names`_.
-
-.. _path templates: https://github.com/googleapis/googleapis/blob
- /57e2d376ac7ef48681554204a3ba78a414f2c533/google/api/http.proto#L212
-.. _resource names: https://cloud.google.com/apis/design/resource_names
-"""
-
-from __future__ import unicode_literals
-
-from collections import deque
-import copy
-import functools
-import re
-
-# Regular expression for extracting variable parts from a path template.
-# The variables can be expressed as:
-#
-# - "*": a single-segment positional variable, for example: "books/*"
-# - "**": a multi-segment positional variable, for example: "shelf/**/book/*"
-# - "{name}": a single-segment wildcard named variable, for example
-# "books/{name}"
-# - "{name=*}: same as above.
-# - "{name=**}": a multi-segment wildcard named variable, for example
-# "shelf/{name=**}"
-# - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
-_VARIABLE_RE = re.compile(
- r"""
- ( # Capture the entire variable expression
- (?P<positional>\*\*?) # Match & capture * and ** positional variables.
- |
- # Match & capture named variables {name}
- {
- (?P<name>[^/]+?)
- # Optionally match and capture the named variable's template.
- (?:=(?P<template>.+?))?
- }
- )
- """,
- re.VERBOSE,
-)
-
-# Segment expressions used for validating paths against a template.
-_SINGLE_SEGMENT_PATTERN = r"([^/]+)"
-_MULTI_SEGMENT_PATTERN = r"(.+)"
-
-
-def _expand_variable_match(positional_vars, named_vars, match):
- """Expand a matched variable with its value.
-
- Args:
- positional_vars (list): A list of positional variables. This list will
- be modified.
- named_vars (dict): A dictionary of named variables.
- match (re.Match): A regular expression match.
-
- Returns:
- str: The expanded variable to replace the match.
-
- Raises:
- ValueError: If a positional or named variable is required by the
- template but not specified or if an unexpected template expression
- is encountered.
- """
- positional = match.group("positional")
- name = match.group("name")
- if name is not None:
- try:
- return str(named_vars[name])
- except KeyError:
- raise ValueError(
- "Named variable '{}' not specified and needed by template "
- "`{}` at position {}".format(name, match.string, match.start())
- )
- elif positional is not None:
- try:
- return str(positional_vars.pop(0))
- except IndexError:
- raise ValueError(
- "Positional variable not specified and needed by template "
- "`{}` at position {}".format(match.string, match.start())
- )
- else:
- raise ValueError("Unknown template expression {}".format(match.group(0)))
-
-
-def expand(tmpl, *args, **kwargs):
- """Expand a path template with the given variables.
-
- .. code-block:: python
-
- >>> expand('users/*/messages/*', 'me', '123')
- users/me/messages/123
- >>> expand('/v1/{name=shelves/*/books/*}', name='shelves/1/books/3')
- /v1/shelves/1/books/3
-
- Args:
- tmpl (str): The path template.
- args: The positional variables for the path.
- kwargs: The named variables for the path.
-
- Returns:
- str: The expanded path
-
- Raises:
- ValueError: If a positional or named variable is required by the
- template but not specified or if an unexpected template expression
- is encountered.
- """
- replacer = functools.partial(_expand_variable_match, list(args), kwargs)
- return _VARIABLE_RE.sub(replacer, tmpl)
-
-
-def _replace_variable_with_pattern(match):
- """Replace a variable match with a pattern that can be used to validate it.
-
- Args:
- match (re.Match): A regular expression match
-
- Returns:
- str: A regular expression pattern that can be used to validate the
- variable in an expanded path.
-
- Raises:
- ValueError: If an unexpected template expression is encountered.
- """
- positional = match.group("positional")
- name = match.group("name")
- template = match.group("template")
- if name is not None:
- if not template:
- return _SINGLE_SEGMENT_PATTERN.format(name)
- elif template == "**":
- return _MULTI_SEGMENT_PATTERN.format(name)
- else:
- return _generate_pattern_for_template(template)
- elif positional == "*":
- return _SINGLE_SEGMENT_PATTERN
- elif positional == "**":
- return _MULTI_SEGMENT_PATTERN
- else:
- raise ValueError("Unknown template expression {}".format(match.group(0)))
-
-
-def _generate_pattern_for_template(tmpl):
- """Generate a pattern that can validate a path template.
-
- Args:
- tmpl (str): The path template
-
- Returns:
- str: A regular expression pattern that can be used to validate an
- expanded path template.
- """
- return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
-
-
-def get_field(request, field):
- """Get the value of a field from a given dictionary.
-
- Args:
- request (dict | Message): A dictionary or a Message object.
- field (str): The key to the request in dot notation.
-
- Returns:
- The value of the field.
- """
- parts = field.split(".")
- value = request
-
- for part in parts:
- if not isinstance(value, dict):
- value = getattr(value, part, None)
- else:
- value = value.get(part)
- if isinstance(value, dict):
- return
- return value
-
-
-def delete_field(request, field):
- """Delete the value of a field from a given dictionary.
-
- Args:
- request (dict | Message): A dictionary object or a Message.
- field (str): The key to the request in dot notation.
- """
- parts = deque(field.split("."))
- while len(parts) > 1:
- part = parts.popleft()
- if not isinstance(request, dict):
- if hasattr(request, part):
- request = getattr(request, part, None)
- else:
- return
- else:
- request = request.get(part)
- part = parts.popleft()
- if not isinstance(request, dict):
- if hasattr(request, part):
- request.ClearField(part)
- else:
- return
- else:
- request.pop(part, None)
-
-
-def validate(tmpl, path):
- """Validate a path against the path template.
-
- .. code-block:: python
-
- >>> validate('users/*/messages/*', 'users/me/messages/123')
- True
- >>> validate('users/*/messages/*', 'users/me/drafts/123')
- False
- >>> validate('/v1/{name=shelves/*/books/*}', /v1/shelves/1/books/3)
- True
- >>> validate('/v1/{name=shelves/*/books/*}', /v1/shelves/1/tapes/3)
- False
-
- Args:
- tmpl (str): The path template.
- path (str): The expanded path.
-
- Returns:
- bool: True if the path matches.
- """
- pattern = _generate_pattern_for_template(tmpl) + "$"
- return True if re.match(pattern, path) is not None else False
-
-
-def transcode(http_options, message=None, **request_kwargs):
- """Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here,
- https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
-
- Args:
- http_options (list(dict)): A list of dicts which consist of these keys,
- 'method' (str): The http method
- 'uri' (str): The path template
- 'body' (str): The body field name (optional)
- (This is a simplified representation of the proto option `google.api.http`)
-
- message (Message) : A request object (optional)
- request_kwargs (dict) : A dict representing the request object
-
- Returns:
- dict: The transcoded request with these keys,
- 'method' (str) : The http method
- 'uri' (str) : The expanded uri
- 'body' (dict | Message) : A dict or a Message representing the body (optional)
- 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values
-
- Raises:
- ValueError: If the request does not match the given template.
- """
- transcoded_value = message or request_kwargs
- bindings = []
- for http_option in http_options:
- request = {}
-
- # Assign path
- uri_template = http_option["uri"]
- fields = [
- (m.group("name"), m.group("template"))
- for m in _VARIABLE_RE.finditer(uri_template)
- ]
- bindings.append((uri_template, fields))
-
- path_args = {field: get_field(transcoded_value, field) for field, _ in fields}
- request["uri"] = expand(uri_template, **path_args)
-
- if not validate(uri_template, request["uri"]) or not all(path_args.values()):
- continue
-
- # Remove fields used in uri path from request
- leftovers = copy.deepcopy(transcoded_value)
- for path_field, _ in fields:
- delete_field(leftovers, path_field)
-
- # Assign body and query params
- body = http_option.get("body")
-
- if body:
- if body == "*":
- request["body"] = leftovers
- if message:
- request["query_params"] = message.__class__()
- else:
- request["query_params"] = {}
- else:
- try:
- if message:
- request["body"] = getattr(leftovers, body)
- delete_field(leftovers, body)
- else:
- request["body"] = leftovers.pop(body)
- except (KeyError, AttributeError):
- continue
- request["query_params"] = leftovers
- else:
- request["query_params"] = leftovers
- request["method"] = http_option["method"]
- return request
-
- bindings_description = [
- '\n\tURI: "{}"'
- "\n\tRequired request fields:\n\t\t{}".format(
- uri,
- "\n\t\t".join(
- [
- 'field: "{}", pattern: "{}"'.format(n, p if p else "*")
- for n, p in fields
- ]
- ),
- )
- for uri, fields in bindings
- ]
-
- raise ValueError(
- "Invalid request."
- "\nSome of the fields of the request message are either not initialized or "
- "initialized with an invalid value."
- "\nPlease make sure your request matches at least one accepted HTTP binding."
- "\nTo match a binding the request message must have all the required fields "
- "initialized with values matching their patterns as listed below:{}".format(
- "\n".join(bindings_description)
- )
- )
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
deleted file mode 100644
index 30cd7c8..0000000
--- a/google/api_core/protobuf_helpers.py
+++ /dev/null
@@ -1,371 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for :mod:`protobuf`."""
-
-import collections
-import collections.abc
-import copy
-import inspect
-
-from google.protobuf import field_mask_pb2
-from google.protobuf import message
-from google.protobuf import wrappers_pb2
-
-
-_SENTINEL = object()
-_WRAPPER_TYPES = (
- wrappers_pb2.BoolValue,
- wrappers_pb2.BytesValue,
- wrappers_pb2.DoubleValue,
- wrappers_pb2.FloatValue,
- wrappers_pb2.Int32Value,
- wrappers_pb2.Int64Value,
- wrappers_pb2.StringValue,
- wrappers_pb2.UInt32Value,
- wrappers_pb2.UInt64Value,
-)
-
-
-def from_any_pb(pb_type, any_pb):
- """Converts an ``Any`` protobuf to the specified message type.
-
- Args:
- pb_type (type): the type of the message that any_pb stores an instance
- of.
- any_pb (google.protobuf.any_pb2.Any): the object to be converted.
-
- Returns:
- pb_type: An instance of the pb_type message.
-
- Raises:
- TypeError: if the message could not be converted.
- """
- msg = pb_type()
-
- # Unwrap proto-plus wrapped messages.
- if callable(getattr(pb_type, "pb", None)):
- msg_pb = pb_type.pb(msg)
- else:
- msg_pb = msg
-
- # Unpack the Any object and populate the protobuf message instance.
- if not any_pb.Unpack(msg_pb):
- raise TypeError(
- f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`"
- )
-
- # Done; return the message.
- return msg
-
-
-def check_oneof(**kwargs):
- """Raise ValueError if more than one keyword argument is not ``None``.
-
- Args:
- kwargs (dict): The keyword arguments sent to the function.
-
- Raises:
- ValueError: If more than one entry in ``kwargs`` is not ``None``.
- """
- # Sanity check: If no keyword arguments were sent, this is fine.
- if not kwargs:
- return
-
- not_nones = [val for val in kwargs.values() if val is not None]
- if len(not_nones) > 1:
- raise ValueError(
- "Only one of {fields} should be set.".format(
- fields=", ".join(sorted(kwargs.keys()))
- )
- )
-
-
-def get_messages(module):
- """Discovers all protobuf Message classes in a given import module.
-
- Args:
- module (module): A Python module; :func:`dir` will be run against this
- module to find Message subclasses.
-
- Returns:
- dict[str, google.protobuf.message.Message]: A dictionary with the
- Message class names as keys, and the Message subclasses themselves
- as values.
- """
- answer = collections.OrderedDict()
- for name in dir(module):
- candidate = getattr(module, name)
- if inspect.isclass(candidate) and issubclass(candidate, message.Message):
- answer[name] = candidate
- return answer
-
-
-def _resolve_subkeys(key, separator="."):
- """Resolve a potentially nested key.
-
- If the key contains the ``separator`` (e.g. ``.``) then the key will be
- split on the first instance of the subkey::
-
- >>> _resolve_subkeys('a.b.c')
- ('a', 'b.c')
- >>> _resolve_subkeys('d|e|f', separator='|')
- ('d', 'e|f')
-
- If not, the subkey will be :data:`None`::
-
- >>> _resolve_subkeys('foo')
- ('foo', None)
-
- Args:
- key (str): A string that may or may not contain the separator.
- separator (str): The namespace separator. Defaults to `.`.
-
- Returns:
- Tuple[str, str]: The key and subkey(s).
- """
- parts = key.split(separator, 1)
-
- if len(parts) > 1:
- return parts
- else:
- return parts[0], None
-
-
-def get(msg_or_dict, key, default=_SENTINEL):
- """Retrieve a key's value from a protobuf Message or dictionary.
-
- Args:
- mdg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
- object.
- key (str): The key to retrieve from the object.
- default (Any): If the key is not present on the object, and a default
- is set, returns that default instead. A type-appropriate falsy
- default is generally recommended, as protobuf messages almost
- always have default values for unset values and it is not always
- possible to tell the difference between a falsy value and an
- unset one. If no default is set then :class:`KeyError` will be
- raised if the key is not present in the object.
-
- Returns:
- Any: The return value from the underlying Message or dict.
-
- Raises:
- KeyError: If the key is not found. Note that, for unset values,
- messages and dictionaries may not have consistent behavior.
- TypeError: If ``msg_or_dict`` is not a Message or Mapping.
- """
- # We may need to get a nested key. Resolve this.
- key, subkey = _resolve_subkeys(key)
-
- # Attempt to get the value from the two types of objects we know about.
- # If we get something else, complain.
- if isinstance(msg_or_dict, message.Message):
- answer = getattr(msg_or_dict, key, default)
- elif isinstance(msg_or_dict, collections.abc.Mapping):
- answer = msg_or_dict.get(key, default)
- else:
- raise TypeError(
- "get() expected a dict or protobuf message, got {!r}.".format(
- type(msg_or_dict)
- )
- )
-
- # If the object we got back is our sentinel, raise KeyError; this is
- # a "not found" case.
- if answer is _SENTINEL:
- raise KeyError(key)
-
- # If a subkey exists, call this method recursively against the answer.
- if subkey is not None and answer is not default:
- return get(answer, subkey, default=default)
-
- return answer
-
-
-def _set_field_on_message(msg, key, value):
- """Set helper for protobuf Messages."""
- # Attempt to set the value on the types of objects we know how to deal
- # with.
- if isinstance(value, (collections.abc.MutableSequence, tuple)):
- # Clear the existing repeated protobuf message of any elements
- # currently inside it.
- while getattr(msg, key):
- getattr(msg, key).pop()
-
- # Write our new elements to the repeated field.
- for item in value:
- if isinstance(item, collections.abc.Mapping):
- getattr(msg, key).add(**item)
- else:
- # protobuf's RepeatedCompositeContainer doesn't support
- # append.
- getattr(msg, key).extend([item])
- elif isinstance(value, collections.abc.Mapping):
- # Assign the dictionary values to the protobuf message.
- for item_key, item_value in value.items():
- set(getattr(msg, key), item_key, item_value)
- elif isinstance(value, message.Message):
- getattr(msg, key).CopyFrom(value)
- else:
- setattr(msg, key, value)
-
-
-def set(msg_or_dict, key, value):
- """Set a key's value on a protobuf Message or dictionary.
-
- Args:
- msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
- object.
- key (str): The key to set.
- value (Any): The value to set.
-
- Raises:
- TypeError: If ``msg_or_dict`` is not a Message or dictionary.
- """
- # Sanity check: Is our target object valid?
- if not isinstance(msg_or_dict, (collections.abc.MutableMapping, message.Message)):
- raise TypeError(
- "set() expected a dict or protobuf message, got {!r}.".format(
- type(msg_or_dict)
- )
- )
-
- # We may be setting a nested key. Resolve this.
- basekey, subkey = _resolve_subkeys(key)
-
- # If a subkey exists, then get that object and call this method
- # recursively against it using the subkey.
- if subkey is not None:
- if isinstance(msg_or_dict, collections.abc.MutableMapping):
- msg_or_dict.setdefault(basekey, {})
- set(get(msg_or_dict, basekey), subkey, value)
- return
-
- if isinstance(msg_or_dict, collections.abc.MutableMapping):
- msg_or_dict[key] = value
- else:
- _set_field_on_message(msg_or_dict, key, value)
-
-
-def setdefault(msg_or_dict, key, value):
- """Set the key on a protobuf Message or dictionary to a given value if the
- current value is falsy.
-
- Because protobuf Messages do not distinguish between unset values and
- falsy ones particularly well (by design), this method treats any falsy
- value (e.g. 0, empty list) as a target to be overwritten, on both Messages
- and dictionaries.
-
- Args:
- msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
- object.
- key (str): The key on the object in question.
- value (Any): The value to set.
-
- Raises:
- TypeError: If ``msg_or_dict`` is not a Message or dictionary.
- """
- if not get(msg_or_dict, key, default=None):
- set(msg_or_dict, key, value)
-
-
-def field_mask(original, modified):
- """Create a field mask by comparing two messages.
-
- Args:
- original (~google.protobuf.message.Message): the original message.
- If set to None, this field will be interpreted as an empty
- message.
- modified (~google.protobuf.message.Message): the modified message.
- If set to None, this field will be interpreted as an empty
- message.
-
- Returns:
- google.protobuf.field_mask_pb2.FieldMask: field mask that contains
- the list of field names that have different values between the two
- messages. If the messages are equivalent, then the field mask is empty.
-
- Raises:
- ValueError: If the ``original`` or ``modified`` are not the same type.
- """
- if original is None and modified is None:
- return field_mask_pb2.FieldMask()
-
- if original is None and modified is not None:
- original = copy.deepcopy(modified)
- original.Clear()
-
- if modified is None and original is not None:
- modified = copy.deepcopy(original)
- modified.Clear()
-
- if not isinstance(original, type(modified)):
- raise ValueError(
- "expected that both original and modified should be of the "
- 'same type, received "{!r}" and "{!r}".'.format(
- type(original), type(modified)
- )
- )
-
- return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
-
-
-def _field_mask_helper(original, modified, current=""):
- answer = []
-
- for name in original.DESCRIPTOR.fields_by_name:
- field_path = _get_path(current, name)
-
- original_val = getattr(original, name)
- modified_val = getattr(modified, name)
-
- if _is_message(original_val) or _is_message(modified_val):
- if original_val != modified_val:
- # Wrapper types do not need to include the .value part of the
- # path.
- if _is_wrapper(original_val) or _is_wrapper(modified_val):
- answer.append(field_path)
- elif not modified_val.ListFields():
- answer.append(field_path)
- else:
- answer.extend(
- _field_mask_helper(original_val, modified_val, field_path)
- )
- else:
- if original_val != modified_val:
- answer.append(field_path)
-
- return answer
-
-
-def _get_path(current, name):
- # gapic-generator-python appends underscores to field names
- # that collide with python keywords.
- # `_` is stripped away as it is not possible to
- # natively define a field with a trailing underscore in protobuf.
- # APIs will reject field masks if fields have trailing underscores.
- # See https://github.com/googleapis/python-api-core/issues/227
- name = name.rstrip("_")
- if not current:
- return name
- return "%s.%s" % (current, name)
-
-
-def _is_message(value):
- return isinstance(value, message.Message)
-
-
-def _is_wrapper(value):
- return type(value) in _WRAPPER_TYPES
diff --git a/google/api_core/py.typed b/google/api_core/py.typed
deleted file mode 100644
index 1d5517b..0000000
--- a/google/api_core/py.typed
+++ /dev/null
@@ -1,2 +0,0 @@
-# Marker file for PEP 561.
-# The google-api-core package uses inline types.
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py
deleted file mode 100644
index a78822f..0000000
--- a/google/api_core/rest_helpers.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for rest transports."""
-
-import functools
-import operator
-
-
-def flatten_query_params(obj, strict=False):
- """Flatten a dict into a list of (name,value) tuples.
-
- The result is suitable for setting query params on an http request.
-
- .. code-block:: python
-
- >>> obj = {'a':
- ... {'b':
- ... {'c': ['x', 'y', 'z']} },
- ... 'd': 'uvw',
- ... 'e': True, }
- >>> flatten_query_params(obj, strict=True)
- [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')]
-
- Note that, as described in
- https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
- repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
- This is enforced in this function.
-
- Args:
- obj: a possibly nested dictionary (from json), or None
- strict: a bool, defaulting to False, to enforce that all values in the
- result tuples be strings and, if boolean, lower-cased.
-
- Returns: a list of tuples, with each tuple having a (possibly) multi-part name
- and a scalar value.
-
- Raises:
- TypeError if obj is not a dict or None
- ValueError if obj contains a list of non-primitive values.
- """
-
- if obj is not None and not isinstance(obj, dict):
- raise TypeError("flatten_query_params must be called with dict object")
-
- return _flatten(obj, key_path=[], strict=strict)
-
-
-def _flatten(obj, key_path, strict=False):
- if obj is None:
- return []
- if isinstance(obj, dict):
- return _flatten_dict(obj, key_path=key_path, strict=strict)
- if isinstance(obj, list):
- return _flatten_list(obj, key_path=key_path, strict=strict)
- return _flatten_value(obj, key_path=key_path, strict=strict)
-
-
-def _is_primitive_value(obj):
- if obj is None:
- return False
-
- if isinstance(obj, (list, dict)):
- raise ValueError("query params may not contain repeated dicts or lists")
-
- return True
-
-
-def _flatten_value(obj, key_path, strict=False):
- return [(".".join(key_path), _canonicalize(obj, strict=strict))]
-
-
-def _flatten_dict(obj, key_path, strict=False):
- items = (
- _flatten(value, key_path=key_path + [key], strict=strict)
- for key, value in obj.items()
- )
- return functools.reduce(operator.concat, items, [])
-
-
-def _flatten_list(elems, key_path, strict=False):
- # Only lists of scalar values are supported.
- # The name (key_path) is repeated for each value.
- items = (
- _flatten_value(elem, key_path=key_path, strict=strict)
- for elem in elems
- if _is_primitive_value(elem)
- )
- return functools.reduce(operator.concat, items, [])
-
-
-def _canonicalize(obj, strict=False):
- if strict:
- value = str(obj)
- if isinstance(obj, bool):
- value = value.lower()
- return value
- return obj
diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py
deleted file mode 100644
index 84aa270..0000000
--- a/google/api_core/rest_streaming.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for server-side streaming in REST."""
-
-from typing import Union
-
-import proto
-import requests
-import google.protobuf.message
-from google.api_core._rest_streaming_base import BaseResponseIterator
-
-
-class ResponseIterator(BaseResponseIterator):
- """Iterator over REST API responses.
-
- Args:
- response (requests.Response): An API response object.
- response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
- class expected to be returned from an API.
-
- Raises:
- ValueError:
- - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
- """
-
- def __init__(
- self,
- response: requests.Response,
- response_message_cls: Union[proto.Message, google.protobuf.message.Message],
- ):
- self._response = response
- # Inner iterator over HTTP response's content.
- self._response_itr = self._response.iter_content(decode_unicode=True)
- super(ResponseIterator, self).__init__(
- response_message_cls=response_message_cls
- )
-
- def cancel(self):
- """Cancel existing streaming operation."""
- self._response.close()
-
- def __next__(self):
- while not self._ready_objs:
- try:
- chunk = next(self._response_itr)
- self._process_chunk(chunk)
- except StopIteration as e:
- if self._level > 0:
- raise ValueError("Unfinished stream: %s" % self._obj)
- raise e
- return self._grab()
-
- def __iter__(self):
- return self
diff --git a/google/api_core/rest_streaming_async.py b/google/api_core/rest_streaming_async.py
deleted file mode 100644
index 370c2b5..0000000
--- a/google/api_core/rest_streaming_async.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for asynchronous server-side streaming in REST."""
-
-from typing import Union
-
-import proto
-
-try:
- import google.auth.aio.transport
-except ImportError as e: # pragma: NO COVER
- raise ImportError(
- "`google-api-core[async_rest]` is required to use asynchronous rest streaming. "
- "Install the `async_rest` extra of `google-api-core` using "
- "`pip install google-api-core[async_rest]`."
- ) from e
-
-import google.protobuf.message
-from google.api_core._rest_streaming_base import BaseResponseIterator
-
-
-class AsyncResponseIterator(BaseResponseIterator):
- """Asynchronous Iterator over REST API responses.
-
- Args:
- response (google.auth.aio.transport.Response): An API response object.
- response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
- class expected to be returned from an API.
-
- Raises:
- ValueError:
- - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
- """
-
- def __init__(
- self,
- response: google.auth.aio.transport.Response,
- response_message_cls: Union[proto.Message, google.protobuf.message.Message],
- ):
- self._response = response
- self._chunk_size = 1024
- # TODO(https://github.com/googleapis/python-api-core/issues/703): mypy does not recognize the abstract content
- # method as an async generator as it looks for the `yield` keyword in the implementation.
- # Given that the abstract method is not implemented, mypy fails to recognize it as an async generator.
- # mypy warnings are silenced until the linked issue is resolved.
- self._response_itr = self._response.content(self._chunk_size).__aiter__() # type: ignore
- super(AsyncResponseIterator, self).__init__(
- response_message_cls=response_message_cls
- )
-
- async def __aenter__(self):
- return self
-
- async def cancel(self):
- """Cancel existing streaming operation."""
- await self._response.close()
-
- async def __anext__(self):
- while not self._ready_objs:
- try:
- chunk = await self._response_itr.__anext__()
- chunk = chunk.decode("utf-8")
- self._process_chunk(chunk)
- except StopAsyncIteration as e:
- if self._level > 0:
- raise ValueError("i Unfinished stream: %s" % self._obj)
- raise e
- except ValueError as e:
- raise e
- return self._grab()
-
- def __aiter__(self):
- return self
-
- async def __aexit__(self, exc_type, exc, tb):
- """Cancel existing async streaming operation."""
- await self._response.close()
diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py
deleted file mode 100644
index 1724fdb..0000000
--- a/google/api_core/retry/__init__.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Retry implementation for Google API client libraries."""
-
-from .retry_base import exponential_sleep_generator
-from .retry_base import if_exception_type
-from .retry_base import if_transient_error
-from .retry_base import build_retry_error
-from .retry_base import RetryFailureReason
-from .retry_unary import Retry
-from .retry_unary import retry_target
-from .retry_unary_async import AsyncRetry
-from .retry_unary_async import retry_target as retry_target_async
-from .retry_streaming import StreamingRetry
-from .retry_streaming import retry_target_stream
-from .retry_streaming_async import AsyncStreamingRetry
-from .retry_streaming_async import retry_target_stream as retry_target_stream_async
-
-# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py
-#
-# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
-from google.api_core import datetime_helpers # noqa: F401
-from google.api_core import exceptions # noqa: F401
-from google.auth import exceptions as auth_exceptions # noqa: F401
-
-__all__ = (
- "exponential_sleep_generator",
- "if_exception_type",
- "if_transient_error",
- "build_retry_error",
- "RetryFailureReason",
- "Retry",
- "AsyncRetry",
- "StreamingRetry",
- "AsyncStreamingRetry",
- "retry_target",
- "retry_target_async",
- "retry_target_stream",
- "retry_target_stream_async",
-)
diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py
deleted file mode 100644
index 3dd975e..0000000
--- a/google/api_core/retry/retry_base.py
+++ /dev/null
@@ -1,372 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Shared classes and functions for retrying requests.
-
-:class:`_BaseRetry` is the base class for :class:`Retry`,
-:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`.
-"""
-
-from __future__ import annotations
-
-import logging
-import random
-import time
-
-from enum import Enum
-from typing import Any, Callable, Optional, Iterator, TYPE_CHECKING
-
-import requests.exceptions
-
-from google.api_core import exceptions
-from google.auth import exceptions as auth_exceptions
-
-if TYPE_CHECKING:
- import sys
-
- if sys.version_info >= (3, 11):
- from typing import Self
- else:
- from typing_extensions import Self
-
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-
-_LOGGER = logging.getLogger("google.api_core.retry")
-
-
-def if_exception_type(
- *exception_types: type[Exception],
-) -> Callable[[Exception], bool]:
- """Creates a predicate to check if the exception is of a given type.
-
- Args:
- exception_types (Sequence[:func:`type`]): The exception types to check
- for.
-
- Returns:
- Callable[Exception]: A predicate that returns True if the provided
- exception is of the given type(s).
- """
-
- def if_exception_type_predicate(exception: Exception) -> bool:
- """Bound predicate for checking an exception type."""
- return isinstance(exception, exception_types)
-
- return if_exception_type_predicate
-
-
-# pylint: disable=invalid-name
-# Pylint sees this as a constant, but it is also an alias that should be
-# considered a function.
-if_transient_error = if_exception_type(
- exceptions.InternalServerError,
- exceptions.TooManyRequests,
- exceptions.ServiceUnavailable,
- requests.exceptions.ConnectionError,
- requests.exceptions.ChunkedEncodingError,
- auth_exceptions.TransportError,
-)
-"""A predicate that checks if an exception is a transient API error.
-
-The following server errors are considered transient:
-
-- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
- ``INTERNAL(13)`` and its subclasses.
-- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
-- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
-- :class:`requests.exceptions.ConnectionError`
-- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
- chunked encoding but sent an invalid chunk.
-- :class:`google.auth.exceptions.TransportError` - Used to indicate an
- error occurred during an HTTP request.
-"""
-# pylint: enable=invalid-name
-
-
-def exponential_sleep_generator(
- initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER
-):
- """Generates sleep intervals based on the exponential back-off algorithm.
-
- This implements the `Truncated Exponential Back-off`_ algorithm.
-
- .. _Truncated Exponential Back-off:
- https://cloud.google.com/storage/docs/exponential-backoff
-
- Args:
- initial (float): The minimum amount of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amount of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Yields:
- float: successive sleep intervals.
- """
- max_delay = min(initial, maximum)
- while True:
- yield random.uniform(0.0, max_delay)
- max_delay = min(max_delay * multiplier, maximum)
-
-
-class RetryFailureReason(Enum):
- """
- The cause of a failed retry, used when building exceptions
- """
-
- TIMEOUT = 0
- NON_RETRYABLE_ERROR = 1
-
-
-def build_retry_error(
- exc_list: list[Exception],
- reason: RetryFailureReason,
- timeout_val: float | None,
- **kwargs: Any,
-) -> tuple[Exception, Exception | None]:
- """
- Default exception_factory implementation.
-
- Returns a RetryError if the failure is due to a timeout, otherwise
- returns the last exception encountered.
-
- Args:
- - exc_list: list of exceptions that occurred during the retry
- - reason: reason for the retry failure.
- Can be TIMEOUT or NON_RETRYABLE_ERROR
- - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message
-
- Returns:
- - tuple: a tuple of the exception to be raised, and the cause exception if any
- """
- if reason == RetryFailureReason.TIMEOUT:
- # return RetryError with the most recent exception as the cause
- src_exc = exc_list[-1] if exc_list else None
- timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else ""
- return (
- exceptions.RetryError(
- f"Timeout {timeout_val_str}exceeded",
- src_exc,
- ),
- src_exc,
- )
- elif exc_list:
- # return most recent exception encountered and its cause
- final_exc = exc_list[-1]
- cause = getattr(final_exc, "__cause__", None)
- return final_exc, cause
- else:
- # no exceptions were given in exc_list. Raise generic RetryError
- return exceptions.RetryError("Unknown error", None), None
-
-
-def _retry_error_helper(
- exc: Exception,
- deadline: float | None,
- sleep_iterator: Iterator[float],
- error_list: list[Exception],
- predicate_fn: Callable[[Exception], bool],
- on_error_fn: Callable[[Exception], None] | None,
- exc_factory_fn: Callable[
- [list[Exception], RetryFailureReason, float | None],
- tuple[Exception, Exception | None],
- ],
- original_timeout: float | None,
-) -> float:
- """
- Shared logic for handling an error for all retry implementations
-
- - Raises an error on timeout or non-retryable error
- - Calls on_error_fn if provided
- - Logs the error
-
- Args:
- - exc: the exception that was raised
- - deadline: the deadline for the retry, calculated as a diff from time.monotonic()
- - sleep_iterator: iterator to draw the next backoff value from
- - error_list: the list of exceptions that have been raised so far
- - predicate_fn: takes `exc` and returns true if the operation should be retried
- - on_error_fn: callback to execute when a retryable error occurs
- - exc_factory_fn: callback used to build the exception to be raised on terminal failure
- - original_timeout_val: the original timeout value for the retry (in seconds),
- to be passed to the exception factory for building an error message
- Returns:
- - the sleep value chosen before the next attempt
- """
- error_list.append(exc)
- if not predicate_fn(exc):
- final_exc, source_exc = exc_factory_fn(
- error_list,
- RetryFailureReason.NON_RETRYABLE_ERROR,
- original_timeout,
- )
- raise final_exc from source_exc
- if on_error_fn is not None:
- on_error_fn(exc)
- # next_sleep is fetched after the on_error callback, to allow clients
- # to update sleep_iterator values dynamically in response to errors
- try:
- next_sleep = next(sleep_iterator)
- except StopIteration:
- raise ValueError("Sleep generator stopped yielding sleep values.") from exc
- if deadline is not None and time.monotonic() + next_sleep > deadline:
- final_exc, source_exc = exc_factory_fn(
- error_list,
- RetryFailureReason.TIMEOUT,
- original_timeout,
- )
- raise final_exc from source_exc
- _LOGGER.debug(
- "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep)
- )
- return next_sleep
-
-
-class _BaseRetry(object):
- """
- Base class for retry configuration objects. This class is intended to capture retry
- and backoff configuration that is common to both synchronous and asynchronous retries,
- for both unary and streaming RPCs. It is not intended to be instantiated directly,
- but rather to be subclassed by the various retry configuration classes.
- """
-
- def __init__(
- self,
- predicate: Callable[[Exception], bool] = if_transient_error,
- initial: float = _DEFAULT_INITIAL_DELAY,
- maximum: float = _DEFAULT_MAXIMUM_DELAY,
- multiplier: float = _DEFAULT_DELAY_MULTIPLIER,
- timeout: Optional[float] = _DEFAULT_DEADLINE,
- on_error: Optional[Callable[[Exception], Any]] = None,
- **kwargs: Any,
- ) -> None:
- self._predicate = predicate
- self._initial = initial
- self._multiplier = multiplier
- self._maximum = maximum
- self._timeout = kwargs.get("deadline", timeout)
- self._deadline = self._timeout
- self._on_error = on_error
-
- def __call__(self, *args, **kwargs) -> Any:
- raise NotImplementedError("Not implemented in base class")
-
- @property
- def deadline(self) -> float | None:
- """
- DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class
- documentation for details.
- """
- return self._timeout
-
- @property
- def timeout(self) -> float | None:
- return self._timeout
-
- def with_deadline(self, deadline: float | None) -> Self:
- """Return a copy of this retry with the given timeout.
-
- DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class
- documentation for details.
-
- Args:
- deadline (float|None): How long to keep retrying, in seconds. If None,
- no timeout is enforced.
-
- Returns:
- Retry: A new retry instance with the given timeout.
- """
- return self.with_timeout(deadline)
-
- def with_timeout(self, timeout: float | None) -> Self:
- """Return a copy of this retry with the given timeout.
-
- Args:
- timeout (float): How long to keep retrying, in seconds. If None,
- no timeout will be enforced.
-
- Returns:
- Retry: A new retry instance with the given timeout.
- """
- return type(self)(
- predicate=self._predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- timeout=timeout,
- on_error=self._on_error,
- )
-
- def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self:
- """Return a copy of this retry with the given predicate.
-
- Args:
- predicate (Callable[Exception]): A callable that should return
- ``True`` if the given exception is retryable.
-
- Returns:
- Retry: A new retry instance with the given predicate.
- """
- return type(self)(
- predicate=predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- timeout=self._timeout,
- on_error=self._on_error,
- )
-
- def with_delay(
- self,
- initial: Optional[float] = None,
- maximum: Optional[float] = None,
- multiplier: Optional[float] = None,
- ) -> Self:
- """Return a copy of this retry with the given delay options.
-
- Args:
- initial (float): The minimum amount of time to delay (in seconds). This must
- be greater than 0. If None, the current value is used.
- maximum (float): The maximum amount of time to delay (in seconds). If None, the
- current value is used.
- multiplier (float): The multiplier applied to the delay. If None, the current
- value is used.
-
- Returns:
- Retry: A new retry instance with the given delay options.
- """
- return type(self)(
- predicate=self._predicate,
- initial=initial if initial is not None else self._initial,
- maximum=maximum if maximum is not None else self._maximum,
- multiplier=multiplier if multiplier is not None else self._multiplier,
- timeout=self._timeout,
- on_error=self._on_error,
- )
-
- def __str__(self) -> str:
- return (
- "<{} predicate={}, initial={:.1f}, maximum={:.1f}, "
- "multiplier={:.1f}, timeout={}, on_error={}>".format(
- type(self).__name__,
- self._predicate,
- self._initial,
- self._maximum,
- self._multiplier,
- self._timeout, # timeout can be None, thus no {:.1f}
- self._on_error,
- )
- )
diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py
deleted file mode 100644
index e4474c8..0000000
--- a/google/api_core/retry/retry_streaming.py
+++ /dev/null
@@ -1,264 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Generator wrapper for retryable streaming RPCs.
-"""
-from __future__ import annotations
-
-from typing import (
- Callable,
- Optional,
- List,
- Tuple,
- Iterable,
- Generator,
- TypeVar,
- Any,
- TYPE_CHECKING,
-)
-
-import sys
-import time
-import functools
-
-from google.api_core.retry.retry_base import _BaseRetry
-from google.api_core.retry.retry_base import _retry_error_helper
-from google.api_core.retry import exponential_sleep_generator
-from google.api_core.retry import build_retry_error
-from google.api_core.retry import RetryFailureReason
-
-if TYPE_CHECKING:
- if sys.version_info >= (3, 10):
- from typing import ParamSpec
- else:
- from typing_extensions import ParamSpec
-
- _P = ParamSpec("_P") # target function call parameters
- _Y = TypeVar("_Y") # yielded values
-
-
def retry_target_stream(
    target: Callable[_P, Iterable[_Y]],
    predicate: Callable[[Exception], bool],
    sleep_generator: Iterable[float],
    timeout: Optional[float] = None,
    on_error: Optional[Callable[[Exception], None]] = None,
    exception_factory: Callable[
        [List[Exception], RetryFailureReason, Optional[float]],
        Tuple[Exception, Optional[Exception]],
    ] = build_retry_error,
    init_args: tuple = (),
    init_kwargs: Optional[dict] = None,
    **kwargs,
) -> Generator[_Y, Any, None]:
    """Create a generator wrapper that retries the wrapped stream if it fails.

    This is the lowest-level retry helper. Generally, you'll use the
    higher-level retry helper :class:`Retry`.

    Args:
        target: The generator function to call and retry.
        predicate: A callable used to determine if an
            exception raised by the target should be considered retryable.
            It should return True to retry or False otherwise.
        sleep_generator: An infinite iterator that determines
            how long to sleep between retries.
        timeout: How long to keep retrying the target.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error: If given, the on_error callback will be called with each
            retryable exception raised by the target. Any error raised by this
            function will *not* be caught.
        exception_factory: A function that is called when the retryable reaches
            a terminal failure state, used to construct an exception to be raised.
            It takes a list of all exceptions encountered, a retry.RetryFailureReason
            enum indicating the failure cause, and the original timeout value
            as arguments. It should return a tuple of the exception to be raised,
            along with the cause exception if any. The default implementation will raise
            a RetryError on timeout, or the last exception encountered otherwise.
        init_args: Positional arguments to pass to the target function.
        init_kwargs: Keyword arguments to pass to the target function.
            Defaults to an empty dict.

    Returns:
        Generator: A retryable generator that wraps the target generator function.

    Raises:
        ValueError: If the sleep generator stops yielding values.
        Exception: a custom exception specified by the exception_factory if provided.
            If no exception_factory is provided:
            google.api_core.RetryError: If the timeout is exceeded while retrying.
            Exception: If the target raises an error that isn't retryable.
    """
    # Use a None sentinel rather than a mutable `{}` default: a literal dict
    # default is shared across every call of the function.
    if init_kwargs is None:
        init_kwargs = {}

    # `deadline` is a deprecated alias for `timeout`; it overrides if supplied.
    timeout = kwargs.get("deadline", timeout)
    deadline: Optional[float] = (
        time.monotonic() + timeout if timeout is not None else None
    )
    # keep track of retryable exceptions we encounter to pass in to exception_factory
    error_list: list[Exception] = []
    sleep_iter = iter(sleep_generator)

    # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
    # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
    while True:
        # Start a new retry loop
        try:
            # Note: in the future, we can add a ResumptionStrategy object
            # to generate new args between calls. For now, use the same args
            # for each attempt.
            subgenerator = target(*init_args, **init_kwargs)
            return (yield from subgenerator)
        # handle exceptions raised by the subgenerator
        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            # defer to shared logic for handling errors
            next_sleep = _retry_error_helper(
                exc,
                deadline,
                sleep_iter,
                error_list,
                predicate,
                on_error,
                exception_factory,
                timeout,
            )
            # if exception not raised, sleep before next attempt
            time.sleep(next_sleep)
-
-
class StreamingRetry(_BaseRetry):
    """Exponential retry decorator for streaming synchronous RPCs.

    This class returns a Generator when called, which wraps the target
    stream in retry logic. If any exception is raised by the target, the
    entire stream will be retried within the wrapper.

    Although the default behavior is to retry transient API errors, a
    different predicate can be provided to retry other exceptions.

    Important Note: when a stream encounters a retryable error, it will
    silently construct a fresh iterator instance in the background
    and continue yielding (likely duplicate) values as if no error occurred.
    This is the most general way to retry a stream, but it often is not the
    desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]

    There are two ways to build more advanced retry logic for streams:

    1. Wrap the target
        Use a ``target`` that maintains state between retries, and creates a
        different generator on each retry call. For example, you can wrap a
        network call in a function that modifies the request based on what has
        already been returned:

        .. code-block:: python

            def attempt_with_modified_request(target, request, seen_items=[]):
                # remove seen items from request on each attempt
                new_request = modify_request(request, seen_items)
                new_generator = target(new_request)
                for item in new_generator:
                    yield item
                    seen_items.append(item)

            retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request)
            retryable_generator = retry_wrapped_fn(target, request)

    2. Wrap the retry generator
        Alternatively, you can wrap the retryable generator itself before
        passing it to the end-user to add a filter on the stream. For
        example, you can keep track of the items that were successfully yielded
        in previous retry attempts, and only yield new items when the
        new attempt surpasses the previous ones:

        .. code-block:: python

            def retryable_with_filter(target):
                stream_idx = 0
                # reset stream_idx when the stream is retried
                def on_error(e):
                    nonlocal stream_idx
                    stream_idx = 0
                # build retryable
                retryable_gen = StreamingRetry(...)(target)
                # keep track of what has been yielded out of filter
                seen_items = []
                for item in retryable_gen():
                    if stream_idx >= len(seen_items):
                        seen_items.append(item)
                        yield item
                    elif item != seen_items[stream_idx]:
                        raise ValueError("Stream differs from last attempt")
                    stream_idx += 1

            filter_retry_wrapped = retryable_with_filter(target)

    Args:
        predicate (Callable[Exception]): A callable that should return ``True``
            if the given exception is retryable.
        initial (float): The minimum amount of time to delay in seconds. This
            must be greater than 0.
        maximum (float): The maximum amount of time to delay in seconds.
        multiplier (float): The multiplier applied to the delay.
        timeout (float): How long to keep retrying, in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Callable[Exception]): A function to call while processing
            a retryable exception. Any error raised by this function will
            *not* be caught.
        deadline (float): DEPRECATED: use `timeout` instead. For backward
            compatibility, if specified it will override the ``timeout`` parameter.
    """

    def __call__(
        self,
        func: Callable[_P, Iterable[_Y]],
        on_error: Callable[[Exception], Any] | None = None,
    ) -> Callable[_P, Generator[_Y, Any, None]]:
        """Wrap a callable with retry behavior.

        Args:
            func (Callable): The callable to add retry behavior to.
            on_error (Optional[Callable[Exception]]): If given, the
                on_error callback will be called with each retryable exception
                raised by the wrapped function. Any error raised by this
                function will *not* be caught. If on_error was specified in the
                constructor, this value will be ignored.

        Returns:
            Callable: A callable that will invoke ``func`` with retry
            behavior.
        """
        # An on_error supplied to the constructor always wins over the
        # one supplied at decoration time.
        error_callback = self._on_error if self._on_error is not None else on_error

        @functools.wraps(func)
        def retry_wrapped_func(
            *args: _P.args, **kwargs: _P.kwargs
        ) -> Generator[_Y, Any, None]:
            """A wrapper that calls target function with retry."""
            return retry_target_stream(
                func,
                predicate=self._predicate,
                sleep_generator=exponential_sleep_generator(
                    self._initial, self._maximum, multiplier=self._multiplier
                ),
                timeout=self._timeout,
                on_error=error_callback,
                init_args=args,
                init_kwargs=kwargs,
            )

        return retry_wrapped_func
diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py
deleted file mode 100644
index 5e5fa24..0000000
--- a/google/api_core/retry/retry_streaming_async.py
+++ /dev/null
@@ -1,328 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Generator wrapper for retryable async streaming RPCs.
-"""
-from __future__ import annotations
-
-from typing import (
- cast,
- Any,
- Callable,
- Iterable,
- AsyncIterator,
- AsyncIterable,
- Awaitable,
- TypeVar,
- AsyncGenerator,
- TYPE_CHECKING,
-)
-
-import asyncio
-import time
-import sys
-import functools
-
-from google.api_core.retry.retry_base import _BaseRetry
-from google.api_core.retry.retry_base import _retry_error_helper
-from google.api_core.retry import exponential_sleep_generator
-from google.api_core.retry import build_retry_error
-from google.api_core.retry import RetryFailureReason
-
-
-if TYPE_CHECKING:
- if sys.version_info >= (3, 10):
- from typing import ParamSpec
- else:
- from typing_extensions import ParamSpec
-
- _P = ParamSpec("_P") # target function call parameters
- _Y = TypeVar("_Y") # yielded values
-
-
async def retry_target_stream(
    target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
    predicate: Callable[[Exception], bool],
    sleep_generator: Iterable[float],
    timeout: float | None = None,
    on_error: Callable[[Exception], None] | None = None,
    exception_factory: Callable[
        [list[Exception], RetryFailureReason, float | None],
        tuple[Exception, Exception | None],
    ] = build_retry_error,
    init_args: tuple = (),
    init_kwargs: dict | None = None,
    **kwargs,
) -> AsyncGenerator[_Y, None]:
    """Create a generator wrapper that retries the wrapped stream if it fails.

    This is the lowest-level retry helper. Generally, you'll use the
    higher-level retry helper :class:`AsyncRetry`.

    Args:
        target: The generator function to call and retry.
        predicate: A callable used to determine if an
            exception raised by the target should be considered retryable.
            It should return True to retry or False otherwise.
        sleep_generator: An infinite iterator that determines
            how long to sleep between retries.
        timeout: How long to keep retrying the target.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error: If given, the on_error callback will be called with each
            retryable exception raised by the target. Any error raised by this
            function will *not* be caught.
        exception_factory: A function that is called when the retryable reaches
            a terminal failure state, used to construct an exception to be raised.
            It takes a list of all exceptions encountered, a retry.RetryFailureReason
            enum indicating the failure cause, and the original timeout value
            as arguments. It should return a tuple of the exception to be raised,
            along with the cause exception if any. The default implementation will raise
            a RetryError on timeout, or the last exception encountered otherwise.
        init_args: Positional arguments to pass to the target function.
        init_kwargs: Keyword arguments to pass to the target function.
            Defaults to an empty dict.

    Returns:
        AsyncGenerator: A retryable generator that wraps the target generator function.

    Raises:
        ValueError: If the sleep generator stops yielding values.
        Exception: a custom exception specified by the exception_factory if provided.
            If no exception_factory is provided:
            google.api_core.RetryError: If the timeout is exceeded while retrying.
            Exception: If the target raises an error that isn't retryable.
    """
    # Use a None sentinel rather than a mutable `{}` default: a literal dict
    # default is shared across every call of the function.
    if init_kwargs is None:
        init_kwargs = {}

    target_iterator: AsyncIterator[_Y] | None = None
    timeout = kwargs.get("deadline", timeout)
    deadline = time.monotonic() + timeout if timeout else None
    # keep track of retryable exceptions we encounter to pass in to exception_factory
    error_list: list[Exception] = []
    sleep_iter = iter(sleep_generator)
    target_is_generator: bool | None = None

    # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
    # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
    while True:
        # Start a new retry loop
        try:
            # Note: in the future, we can add a ResumptionStrategy object
            # to generate new args between calls. For now, use the same args
            # for each attempt.
            target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target(
                *init_args, **init_kwargs
            )
            try:
                # gapic functions return the generator behind an awaitable
                # unwrap the awaitable so we can work with the generator directly
                target_output = await target_output  # type: ignore
            except TypeError:
                # was not awaitable, continue
                pass
            target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__()

            if target_is_generator is None:
                # Check if target supports generator features (asend, athrow, aclose)
                target_is_generator = bool(getattr(target_iterator, "asend", None))

            sent_in = None
            while True:
                ## Read from target_iterator
                # If the target is a generator, we will advance it with `asend`
                # otherwise, we will use `anext`
                if target_is_generator:
                    next_value = await target_iterator.asend(sent_in)  # type: ignore
                else:
                    next_value = await target_iterator.__anext__()
                ## Yield from Wrapper to caller
                try:
                    # yield latest value from target
                    # exceptions from `athrow` and `aclose` are injected here
                    sent_in = yield next_value
                except GeneratorExit:
                    # if wrapper received `aclose` while waiting on yield,
                    # it will raise GeneratorExit here
                    if target_is_generator:
                        # pass to inner target_iterator for handling
                        await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
                    else:
                        raise
                    return
                except:  # noqa: E722
                    # bare except catches any exception passed to `athrow`
                    if target_is_generator:
                        # delegate error handling to target_iterator
                        await cast(AsyncGenerator["_Y", None], target_iterator).athrow(
                            cast(BaseException, sys.exc_info()[1])
                        )
                    else:
                        raise
                    return
        except StopAsyncIteration:
            # if iterator exhausted, return
            return
        # handle exceptions raised by the target_iterator
        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            # defer to shared logic for handling errors
            next_sleep = _retry_error_helper(
                exc,
                deadline,
                sleep_iter,
                error_list,
                predicate,
                on_error,
                exception_factory,
                timeout,
            )
            # if exception not raised, sleep before next attempt
            await asyncio.sleep(next_sleep)

        finally:
            if target_is_generator and target_iterator is not None:
                await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
-
-
class AsyncStreamingRetry(_BaseRetry):
    """Exponential retry decorator for async streaming rpcs.

    This class returns an AsyncGenerator when called, which wraps the target
    stream in retry logic. If any exception is raised by the target, the
    entire stream will be retried within the wrapper.

    Although the default behavior is to retry transient API errors, a
    different predicate can be provided to retry other exceptions.

    Important Note: when a stream encounters a retryable error, it will
    silently construct a fresh iterator instance in the background
    and continue yielding (likely duplicate) values as if no error occurred.
    This is the most general way to retry a stream, but it often is not the
    desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]

    There are two ways to build more advanced retry logic for streams:

    1. Wrap the target
        Use a ``target`` that maintains state between retries, and creates a
        different generator on each retry call. For example, you can wrap a
        grpc call in a function that modifies the request based on what has
        already been returned:

        .. code-block:: python

            async def attempt_with_modified_request(target, request, seen_items=[]):
                # remove seen items from request on each attempt
                new_request = modify_request(request, seen_items)
                new_generator = await target(new_request)
                async for item in new_generator:
                    yield item
                    seen_items.append(item)

            retry_wrapped = AsyncRetry(is_stream=True,...)(attempt_with_modified_request, target, request, [])

    2. Wrap the retry generator
        Alternatively, you can wrap the retryable generator itself before
        passing it to the end-user to add a filter on the stream. For
        example, you can keep track of the items that were successfully yielded
        in previous retry attempts, and only yield new items when the
        new attempt surpasses the previous ones:

        .. code-block:: python

            async def retryable_with_filter(target):
                stream_idx = 0
                # reset stream_idx when the stream is retried
                def on_error(e):
                    nonlocal stream_idx
                    stream_idx = 0
                # build retryable
                retryable_gen = AsyncRetry(is_stream=True, ...)(target)
                # keep track of what has been yielded out of filter
                seen_items = []
                async for item in await retryable_gen():
                    if stream_idx >= len(seen_items):
                        yield item
                        seen_items.append(item)
                    elif item != seen_items[stream_idx]:
                        raise ValueError("Stream differs from last attempt")
                    stream_idx += 1

            filter_retry_wrapped = retryable_with_filter(target)

    Args:
        predicate (Callable[Exception]): A callable that should return ``True``
            if the given exception is retryable.
        initial (float): The minimum amount of time to delay in seconds. This
            must be greater than 0.
        maximum (float): The maximum amount of time to delay in seconds.
        multiplier (float): The multiplier applied to the delay.
        timeout (Optional[float]): How long to keep retrying in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Optional[Callable[Exception]]): A function to call while processing
            a retryable exception. Any error raised by this function will
            *not* be caught.
        is_stream (bool): Indicates whether the input function
            should be treated as a stream function (i.e. an AsyncGenerator,
            or function or coroutine that returns an AsyncIterable).
            If True, the iterable will be wrapped with retry logic, and any
            failed outputs will restart the stream. If False, only the input
            function call itself will be retried. Defaults to False.
            To avoid duplicate values, retryable streams should typically be
            wrapped in additional filter logic before use.
        deadline (float): DEPRECATED use ``timeout`` instead. If set it will
            override ``timeout`` parameter.
    """

    def __call__(
        self,
        func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
        on_error: Callable[[Exception], Any] | None = None,
    ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]:
        """Wrap a callable with retry behavior.

        Args:
            func (Callable): The callable or stream to add retry behavior to.
            on_error (Optional[Callable[Exception]]): If given, the
                on_error callback will be called with each retryable exception
                raised by the wrapped function. Any error raised by this
                function will *not* be caught. If on_error was specified in the
                constructor, this value will be ignored.

        Returns:
            Callable: A callable that will invoke ``func`` with retry
            behavior.
        """
        # An on_error supplied to the constructor takes precedence over the
        # one supplied at decoration time.
        if self._on_error is not None:
            on_error = self._on_error

        @functools.wraps(func)
        async def retry_wrapped_func(
            *args: _P.args, **kwargs: _P.kwargs
        ) -> AsyncGenerator[_Y, None]:
            """A wrapper that calls target function with retry."""
            sleep_generator = exponential_sleep_generator(
                self._initial, self._maximum, multiplier=self._multiplier
            )
            # retry_target_stream is an async generator function; awaiting
            # this wrapper hands the (not-yet-started) generator to the caller.
            return retry_target_stream(
                func,
                self._predicate,
                sleep_generator,
                self._timeout,
                on_error,
                init_args=args,
                init_kwargs=kwargs,
            )

        return retry_wrapped_func
diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py
deleted file mode 100644
index 6d36bc7..0000000
--- a/google/api_core/retry/retry_unary.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for retrying functions with exponential back-off.
-
-The :class:`Retry` decorator can be used to retry functions that raise
-exceptions using exponential backoff. Because a exponential sleep algorithm is
-used, the retry is limited by a `timeout`. The timeout determines the window
-in which retries will be attempted. This is used instead of total number of retries
-because it is difficult to ascertain the amount of time a function can block
-when using total number of retries and exponential backoff.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry.Retry()
- def call_flaky_rpc():
- return client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
- def check_if_exists():
- return client.does_thing_exist()
-
- is_available = check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry.Retry(timeout=60)
- result = client.some_method(retry=my_retry)
-
-"""
-
-from __future__ import annotations
-
-import functools
-import sys
-import time
-import inspect
-import warnings
-from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING
-
-from google.api_core.retry.retry_base import _BaseRetry
-from google.api_core.retry.retry_base import _retry_error_helper
-from google.api_core.retry.retry_base import exponential_sleep_generator
-from google.api_core.retry.retry_base import build_retry_error
-from google.api_core.retry.retry_base import RetryFailureReason
-
-
-if TYPE_CHECKING:
- if sys.version_info >= (3, 10):
- from typing import ParamSpec
- else:
- from typing_extensions import ParamSpec
-
- _P = ParamSpec("_P") # target function call parameters
- _R = TypeVar("_R") # target function returned value
-
# Emitted when the synchronous Retry wraps a coroutine function: the coroutine
# would be returned unawaited, so the retry logic would never actually run.
_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead."
-
-
def retry_target(
    target: Callable[[], _R],
    predicate: Callable[[Exception], bool],
    sleep_generator: Iterable[float],
    timeout: float | None = None,
    on_error: Callable[[Exception], None] | None = None,
    exception_factory: Callable[
        [list[Exception], RetryFailureReason, float | None],
        tuple[Exception, Exception | None],
    ] = build_retry_error,
    **kwargs,
):
    """Call a function and retry if it fails.

    This is the lowest-level retry helper. Generally, you'll use the
    higher-level retry helper :class:`Retry`.

    Args:
        target(Callable): The function to call and retry. This must be a
            nullary function - apply arguments with `functools.partial`.
        predicate (Callable[Exception]): A callable used to determine if an
            exception raised by the target should be considered retryable.
            It should return True to retry or False otherwise.
        sleep_generator (Iterable[float]): An infinite iterator that determines
            how long to sleep between retries.
        timeout (Optional[float]): How long to keep retrying the target.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Optional[Callable[Exception]]): If given, the on_error
            callback will be called with each retryable exception raised by the
            target. Any error raised by this function will *not* be caught.
        exception_factory: A function that is called when the retryable reaches
            a terminal failure state, used to construct an exception to be raised.
            It takes a list of all exceptions encountered, a retry.RetryFailureReason
            enum indicating the failure cause, and the original timeout value
            as arguments. It should return a tuple of the exception to be raised,
            along with the cause exception if any. The default implementation will raise
            a RetryError on timeout, or the last exception encountered otherwise.
        deadline (float): DEPRECATED: use ``timeout`` instead. For backward
            compatibility, if specified it will override ``timeout`` parameter.

    Returns:
        Any: the return value of the target function.

    Raises:
        ValueError: If the sleep generator stops yielding values.
        Exception: a custom exception specified by the exception_factory if provided.
            If no exception_factory is provided:
            google.api_core.RetryError: If the timeout is exceeded while retrying.
            Exception: If the target raises an error that isn't retryable.
    """
    # ``deadline`` is a deprecated alias for ``timeout`` and wins if supplied.
    timeout = kwargs.get("deadline", timeout)

    monotonic_deadline = None if timeout is None else time.monotonic() + timeout
    # Retryable exceptions seen so far; handed to exception_factory on failure.
    attempt_errors: list[Exception] = []
    delays = iter(sleep_generator)

    # Keep attempting until a call succeeds, or a terminal exception is
    # raised inside _retry_error_helper.
    # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
    while True:
        try:
            result = target()
            # A coroutine here means the caller wrapped an async function with
            # the synchronous Retry; warn, since it is returned unawaited.
            if inspect.isawaitable(result):
                warnings.warn(_ASYNC_RETRY_WARNING)
            return result

        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            # defer to shared logic for handling errors
            delay = _retry_error_helper(
                exc,
                monotonic_deadline,
                delays,
                attempt_errors,
                predicate,
                on_error,
                exception_factory,
                timeout,
            )
            # The helper did not raise, so this attempt is retryable: back off.
            time.sleep(delay)
-
-
class Retry(_BaseRetry):
    """Exponential retry decorator for unary synchronous RPCs.

    This class is a decorator used to add retry or polling behavior to an RPC
    call.

    Although the default behavior is to retry transient API errors, a
    different predicate can be provided to retry other exceptions.

    There are two important concepts that retry/polling behavior may operate on,
    Deadline and Timeout, which need to be properly defined for the correct
    usage of this class and the rest of the library.

    Deadline: a fixed point in time by which a certain operation must
    terminate. For example, if a certain operation has a deadline
    "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
    error) by that time, regardless of when it was started or whether it
    was started at all.

    Timeout: the maximum duration of time after which a certain operation
    must terminate (successfully or with an error). The countdown begins right
    after an operation was started. For example, if an operation was started at
    09:24:00 with timeout of 75 seconds, it must terminate no later than
    09:25:15.

    Unfortunately, in the past this class (and the api-core library as a whole) has not
    been properly distinguishing the concepts of "timeout" and "deadline", and the
    ``deadline`` parameter has meant ``timeout``. That is why
    ``deadline`` has been deprecated and ``timeout`` should be used instead. If the
    ``deadline`` parameter is set, it will override the ``timeout`` parameter.
    In other words, ``retry.deadline`` should be treated as just a deprecated alias for
    ``retry.timeout``.

    Said another way, it is safe to assume that this class and the rest of this
    library operate in terms of timeouts (not deadlines) unless explicitly
    noted the usage of deadline semantics.

    It is also important to
    understand the three most common applications of the Timeout concept in the
    context of this library.

    Usually the generic Timeout term may stand for one of the following actual
    timeouts: RPC Timeout, Retry Timeout, or Polling Timeout.

    RPC Timeout: a value supplied by the client to the server so
    that the server side knows the maximum amount of time it is expected to
    spend handling that specific RPC. For example, in the case of gRPC transport,
    RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2
    request. The `timeout` property of this class normally never represents the
    RPC Timeout as it is handled separately by the ``google.api_core.timeout``
    module of this library.

    Retry Timeout: this is the most common meaning of the ``timeout`` property
    of this class, and defines how long a certain RPC may be retried in case
    the server returns an error.

    Polling Timeout: defines how long the
    client side is allowed to call the polling RPC repeatedly to check a status of a
    long-running operation. Each polling RPC is
    expected to succeed (its errors are supposed to be handled by the retry
    logic). The decision as to whether a new polling attempt needs to be made is based
    not on the RPC status code but on the status of the returned
    status of an operation. In other words: we will poll a long-running operation until
    the operation is done or the polling timeout expires. Each poll will inform us of
    the status of the operation. The poll consists of an RPC to the server that may
    itself be retried as per the poll-specific retry settings in case of errors. The
    operation-level retry settings do NOT apply to polling-RPC retries.

    With the actual timeout types being defined above, the client libraries
    often refer to just Timeout without clarifying which type specifically
    that is. In that case the actual timeout type (sometimes also referred to as
    Logical Timeout) can be determined from the context. If it is a unary rpc
    call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if
    provided directly as a standalone value) or Retry Timeout (if provided as
    ``retry.timeout`` property of the unary RPC's retry config). For
    ``Operation`` or ``PollingFuture`` in general Timeout stands for
    Polling Timeout.

    Args:
        predicate (Callable[Exception]): A callable that should return ``True``
            if the given exception is retryable.
        initial (float): The minimum amount of time to delay in seconds. This
            must be greater than 0.
        maximum (float): The maximum amount of time to delay in seconds.
        multiplier (float): The multiplier applied to the delay.
        timeout (Optional[float]): How long to keep retrying, in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Callable[Exception]): A function to call while processing
            a retryable exception. Any error raised by this function will
            *not* be caught.
        deadline (float): DEPRECATED: use `timeout` instead. For backward
            compatibility, if specified it will override the ``timeout`` parameter.
    """

    def __call__(
        self,
        func: Callable[_P, _R],
        on_error: Callable[[Exception], Any] | None = None,
    ) -> Callable[_P, _R]:
        """Wrap a callable with retry behavior.

        Args:
            func (Callable): The callable to add retry behavior to.
            on_error (Optional[Callable[Exception]]): If given, the
                on_error callback will be called with each retryable exception
                raised by the wrapped function. Any error raised by this
                function will *not* be caught. If on_error was specified in the
                constructor, this value will be ignored.

        Returns:
            Callable: A callable that will invoke ``func`` with retry
            behavior.
        """
        # An on_error supplied to the constructor takes precedence over the
        # one supplied at decoration time.
        error_callback = self._on_error if self._on_error is not None else on_error

        @functools.wraps(func)
        def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
            """A wrapper that calls target function with retry."""
            return retry_target(
                functools.partial(func, *args, **kwargs),
                self._predicate,
                exponential_sleep_generator(
                    self._initial, self._maximum, multiplier=self._multiplier
                ),
                timeout=self._timeout,
                on_error=error_callback,
            )

        return retry_wrapped_func
diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py
deleted file mode 100644
index 1f72476..0000000
--- a/google/api_core/retry/retry_unary_async.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for retrying coroutine functions with exponential back-off.
-
-The :class:`AsyncRetry` decorator shares most functionality and behavior with
-:class:`Retry`, but supports coroutine functions. Please refer to description
-of :class:`Retry` for more details.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry()
- async def call_flaky_rpc():
- return await client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = await call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
- async def check_if_exists():
- return await client.does_thing_exist()
-
- is_available = await check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry_async.AsyncRetry(timeout=60)
- result = await client.some_method(retry=my_retry)
-
-"""
-
-from __future__ import annotations
-
-import asyncio
-import time
-import functools
-from typing import (
- Awaitable,
- Any,
- Callable,
- Iterable,
- TypeVar,
- TYPE_CHECKING,
-)
-
-from google.api_core.retry.retry_base import _BaseRetry
-from google.api_core.retry.retry_base import _retry_error_helper
-from google.api_core.retry.retry_base import exponential_sleep_generator
-from google.api_core.retry.retry_base import build_retry_error
-from google.api_core.retry.retry_base import RetryFailureReason
-
-# for backwards compatibility, expose helpers in this module
-from google.api_core.retry.retry_base import if_exception_type # noqa
-from google.api_core.retry.retry_base import if_transient_error # noqa
-
-if TYPE_CHECKING:
- import sys
-
- if sys.version_info >= (3, 10):
- from typing import ParamSpec
- else:
- from typing_extensions import ParamSpec
-
- _P = ParamSpec("_P") # target function call parameters
- _R = TypeVar("_R") # target function returned value
-
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds
-
-
-async def retry_target(
- target: Callable[[], Awaitable[_R]],
- predicate: Callable[[Exception], bool],
- sleep_generator: Iterable[float],
- timeout: float | None = None,
- on_error: Callable[[Exception], None] | None = None,
- exception_factory: Callable[
- [list[Exception], RetryFailureReason, float | None],
- tuple[Exception, Exception | None],
- ] = build_retry_error,
- **kwargs,
-):
- """Await a coroutine and retry if it fails.
-
- This is the lowest-level retry helper. Generally, you'll use the
- higher-level retry helper :class:`Retry`.
-
- Args:
- target(Callable[[], Any]): The function to call and retry. This must be a
- nullary function - apply arguments with `functools.partial`.
- predicate (Callable[Exception]): A callable used to determine if an
- exception raised by the target should be considered retryable.
- It should return True to retry or False otherwise.
- sleep_generator (Iterable[float]): An infinite iterator that determines
- how long to sleep between retries.
- timeout (Optional[float]): How long to keep retrying the target, in seconds.
- Note: timeout is only checked before initiating a retry, so the target may
- run past the timeout value as long as it is healthy.
- on_error (Optional[Callable[Exception]]): If given, the on_error
- callback will be called with each retryable exception raised by the
- target. Any error raised by this function will *not* be caught.
- exception_factory: A function that is called when the retryable reaches
- a terminal failure state, used to construct an exception to be raised.
- It takes a list of all exceptions encountered, a retry.RetryFailureReason
- enum indicating the failure cause, and the original timeout value
- as arguments. It should return a tuple of the exception to be raised,
- along with the cause exception if any. The default implementation will raise
- a RetryError on timeout, or the last exception encountered otherwise.
- deadline (float): DEPRECATED use ``timeout`` instead. For backward
- compatibility, if set it will override the ``timeout`` parameter.
-
- Returns:
- Any: the return value of the target function.
-
- Raises:
- ValueError: If the sleep generator stops yielding values.
- Exception: a custom exception specified by the exception_factory if provided.
- If no exception_factory is provided:
- google.api_core.RetryError: If the timeout is exceeded while retrying.
- Exception: If the target raises an error that isn't retryable.
- """
-
- timeout = kwargs.get("deadline", timeout)
-
- deadline = time.monotonic() + timeout if timeout is not None else None
- error_list: list[Exception] = []
- sleep_iter = iter(sleep_generator)
-
- # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
- # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
- while True:
- try:
- return await target()
- # pylint: disable=broad-except
- # This function explicitly must deal with broad exceptions.
- except Exception as exc:
- # defer to shared logic for handling errors
- next_sleep = _retry_error_helper(
- exc,
- deadline,
- sleep_iter,
- error_list,
- predicate,
- on_error,
- exception_factory,
- timeout,
- )
- # if exception not raised, sleep before next attempt
- await asyncio.sleep(next_sleep)
-
-
-class AsyncRetry(_BaseRetry):
- """Exponential retry decorator for async coroutines.
-
- This class is a decorator used to add exponential back-off retry behavior
- to an RPC call.
-
- Although the default behavior is to retry transient API errors, a
- different predicate can be provided to retry other exceptions.
-
- Args:
- predicate (Callable[Exception]): A callable that should return ``True``
- if the given exception is retryable.
- initial (float): The minimum amount of time to delay in seconds. This
- must be greater than 0.
- maximum (float): The maximum amount of time to delay in seconds.
- multiplier (float): The multiplier applied to the delay.
- timeout (Optional[float]): How long to keep retrying in seconds.
- Note: timeout is only checked before initiating a retry, so the target may
- run past the timeout value as long as it is healthy.
- on_error (Optional[Callable[Exception]]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
- deadline (float): DEPRECATED use ``timeout`` instead. If set it will
- override ``timeout`` parameter.
- """
-
- def __call__(
- self,
- func: Callable[..., Awaitable[_R]],
- on_error: Callable[[Exception], Any] | None = None,
- ) -> Callable[_P, Awaitable[_R]]:
- """Wrap a callable with retry behavior.
-
- Args:
- func (Callable): The callable or stream to add retry behavior to.
- on_error (Optional[Callable[Exception]]): If given, the
- on_error callback will be called with each retryable exception
- raised by the wrapped function. Any error raised by this
- function will *not* be caught. If on_error was specified in the
- constructor, this value will be ignored.
-
- Returns:
- Callable: A callable that will invoke ``func`` with retry
- behavior.
- """
- if self._on_error is not None:
- on_error = self._on_error
-
- @functools.wraps(func)
- async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
- """A wrapper that calls target function with retry."""
- sleep_generator = exponential_sleep_generator(
- self._initial, self._maximum, multiplier=self._multiplier
- )
- return await retry_target(
- functools.partial(func, *args, **kwargs),
- predicate=self._predicate,
- sleep_generator=sleep_generator,
- timeout=self._timeout,
- on_error=on_error,
- )
-
- return retry_wrapped_func
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py
deleted file mode 100644
index 90a2d5a..0000000
--- a/google/api_core/retry_async.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py
-#
-# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
-from google.api_core import datetime_helpers # noqa: F401
-from google.api_core import exceptions # noqa: F401
-from google.api_core.retry import exponential_sleep_generator # noqa: F401
-from google.api_core.retry import if_exception_type # noqa: F401
-from google.api_core.retry import if_transient_error # noqa: F401
-from google.api_core.retry.retry_unary_async import AsyncRetry
-from google.api_core.retry.retry_unary_async import retry_target
-
-__all__ = (
- "AsyncRetry",
- "datetime_helpers",
- "exceptions",
- "exponential_sleep_generator",
- "if_exception_type",
- "if_transient_error",
- "retry_target",
-)
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
deleted file mode 100644
index 55b195e..0000000
--- a/google/api_core/timeout.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Decorators for applying timeout arguments to functions.
-
-These decorators are used to wrap API methods to apply either a
-Deadline-dependent (recommended), constant (DEPRECATED) or exponential
-(DEPRECATED) timeout argument.
-
-For example, imagine an API method that can take a while to return results,
-such as one that might block until a resource is ready:
-
-.. code-block:: python
-
- def is_thing_ready(timeout=None):
- response = requests.get('https://example.com/is_thing_ready')
- response.raise_for_status()
- return response.json()
-
-This module allows a function like this to be wrapped so that timeouts are
-automatically determined, for example:
-
-.. code-block:: python
-
- timeout_ = timeout.ExponentialTimeout()
- is_thing_ready_with_timeout = timeout_(is_thing_ready)
-
- for n in range(10):
- try:
- is_thing_ready_with_timeout({'example': 'data'})
- except:
- pass
-
-In this example the first call to ``is_thing_ready`` will have a relatively
-small timeout (like 1 second). If the resource is available and the request
-completes quickly, the loop exits. But, if the resource isn't yet available
-and the request times out, it'll be retried - this time with a larger timeout.
-
-In the broader context these decorators are typically combined with
-:mod:`google.api_core.retry` to implement API methods with a signature that
-matches ``api_method(request, timeout=None, retry=None)``.
-"""
-
-from __future__ import unicode_literals
-
-import datetime
-import functools
-
-from google.api_core import datetime_helpers
-
-_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
-_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
-_DEFAULT_TIMEOUT_MULTIPLIER = 2.0
-# If specified, must be in seconds. If none, deadline is not used in the
-# timeout calculation.
-_DEFAULT_DEADLINE = None
-
-
-class TimeToDeadlineTimeout(object):
- """A decorator that decreases timeout set for an RPC based on how much time
- has left till its deadline. The deadline is calculated as
- ``now + initial_timeout`` when this decorator is first called for an rpc.
-
- In other words this decorator implements deadline semantics in terms of a
- sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0.
-
- Args:
- timeout (Optional[float]): the timeout (in seconds) to applied to the
- wrapped function. If `None`, the target function is expected to
- never timeout.
- """
-
- def __init__(self, timeout=None, clock=datetime_helpers.utcnow):
- self._timeout = timeout
- self._clock = clock
-
- def __call__(self, func):
- """Apply the timeout decorator.
-
- Args:
- func (Callable): The function to apply the timeout argument to.
- This function must accept a timeout keyword argument.
-
- Returns:
- Callable: The wrapped function.
- """
-
- first_attempt_timestamp = self._clock().timestamp()
-
- @functools.wraps(func)
- def func_with_timeout(*args, **kwargs):
- """Wrapped function that adds timeout."""
-
- if self._timeout is not None:
- # All calculations are in seconds
- now_timestamp = self._clock().timestamp()
-
- # To avoid usage of nonlocal but still have round timeout
- # numbers for first attempt (in most cases the only attempt made
- # for an RPC.
- if now_timestamp - first_attempt_timestamp < 0.001:
- now_timestamp = first_attempt_timestamp
-
- time_since_first_attempt = now_timestamp - first_attempt_timestamp
- remaining_timeout = self._timeout - time_since_first_attempt
-
- # Although the `deadline` parameter in `google.api_core.retry.Retry`
- # is deprecated, and should be treated the same as the `timeout`,
- # it is still possible for the `deadline` argument in
- # `google.api_core.retry.Retry` to be larger than the `timeout`.
- # See https://github.com/googleapis/python-api-core/issues/654
- # Only positive non-zero timeouts are supported.
- # Revert back to the initial timeout for negative or 0 timeout values.
- if remaining_timeout < 1:
- remaining_timeout = self._timeout
-
- kwargs["timeout"] = remaining_timeout
-
- return func(*args, **kwargs)
-
- return func_with_timeout
-
- def __str__(self):
- return "<TimeToDeadlineTimeout timeout={:.1f}>".format(self._timeout)
-
-
-class ConstantTimeout(object):
- """A decorator that adds a constant timeout argument.
-
- DEPRECATED: use ``TimeToDeadlineTimeout`` instead.
-
- This is effectively equivalent to
- ``functools.partial(func, timeout=timeout)``.
-
- Args:
- timeout (Optional[float]): the timeout (in seconds) to applied to the
- wrapped function. If `None`, the target function is expected to
- never timeout.
- """
-
- def __init__(self, timeout=None):
- self._timeout = timeout
-
- def __call__(self, func):
- """Apply the timeout decorator.
-
- Args:
- func (Callable): The function to apply the timeout argument to.
- This function must accept a timeout keyword argument.
-
- Returns:
- Callable: The wrapped function.
- """
-
- @functools.wraps(func)
- def func_with_timeout(*args, **kwargs):
- """Wrapped function that adds timeout."""
- kwargs["timeout"] = self._timeout
- return func(*args, **kwargs)
-
- return func_with_timeout
-
- def __str__(self):
- return "<ConstantTimeout timeout={:.1f}>".format(self._timeout)
-
-
-def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
- """A generator that yields exponential timeout values.
-
- Args:
- initial (float): The initial timeout.
- maximum (float): The maximum timeout.
- multiplier (float): The multiplier applied to the timeout.
- deadline (float): The overall deadline across all invocations.
-
- Yields:
- float: A timeout value.
- """
- if deadline is not None:
- deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
- seconds=deadline
- )
- else:
- deadline_datetime = datetime.datetime.max
-
- timeout = initial
- while True:
- now = datetime_helpers.utcnow()
- yield min(
- # The calculated timeout based on invocations.
- timeout,
- # The set maximum timeout.
- maximum,
- # The remaining time before the deadline is reached.
- float((deadline_datetime - now).seconds),
- )
- timeout = timeout * multiplier
-
-
-class ExponentialTimeout(object):
- """A decorator that adds an exponentially increasing timeout argument.
-
- DEPRECATED: the concept of incrementing timeout exponentially has been
- deprecated. Use ``TimeToDeadlineTimeout`` instead.
-
- This is useful if a function is called multiple times. Each time the
- function is called this decorator will calculate a new timeout parameter
- based on the the number of times the function has been called.
-
- For example
-
- .. code-block:: python
-
- Args:
- initial (float): The initial timeout to pass.
- maximum (float): The maximum timeout for any one call.
- multiplier (float): The multiplier applied to the timeout for each
- invocation.
- deadline (Optional[float]): The overall deadline across all
- invocations. This is used to prevent a very large calculated
- timeout from pushing the overall execution time over the deadline.
- This is especially useful in conjunction with
- :mod:`google.api_core.retry`. If ``None``, the timeouts will not
- be adjusted to accommodate an overall deadline.
- """
-
- def __init__(
- self,
- initial=_DEFAULT_INITIAL_TIMEOUT,
- maximum=_DEFAULT_MAXIMUM_TIMEOUT,
- multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
- deadline=_DEFAULT_DEADLINE,
- ):
- self._initial = initial
- self._maximum = maximum
- self._multiplier = multiplier
- self._deadline = deadline
-
- def with_deadline(self, deadline):
- """Return a copy of this timeout with the given deadline.
-
- Args:
- deadline (float): The overall deadline across all invocations.
-
- Returns:
- ExponentialTimeout: A new instance with the given deadline.
- """
- return ExponentialTimeout(
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- deadline=deadline,
- )
-
- def __call__(self, func):
- """Apply the timeout decorator.
-
- Args:
- func (Callable): The function to apply the timeout argument to.
- This function must accept a timeout keyword argument.
-
- Returns:
- Callable: The wrapped function.
- """
- timeouts = _exponential_timeout_generator(
- self._initial, self._maximum, self._multiplier, self._deadline
- )
-
- @functools.wraps(func)
- def func_with_timeout(*args, **kwargs):
- """Wrapped function that adds timeout."""
- kwargs["timeout"] = next(timeouts)
- return func(*args, **kwargs)
-
- return func_with_timeout
-
- def __str__(self):
- return (
- "<ExponentialTimeout initial={:.1f}, maximum={:.1f}, "
- "multiplier={:.1f}, deadline={:.1f}>".format(
- self._initial, self._maximum, self._multiplier, self._deadline
- )
- )
diff --git a/google/api_core/universe.py b/google/api_core/universe.py
deleted file mode 100644
index 3566964..0000000
--- a/google/api_core/universe.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for universe domain."""
-
-from typing import Any, Optional
-
-DEFAULT_UNIVERSE = "googleapis.com"
-
-
-class EmptyUniverseError(ValueError):
- def __init__(self):
- message = "Universe Domain cannot be an empty string."
- super().__init__(message)
-
-
-class UniverseMismatchError(ValueError):
- def __init__(self, client_universe, credentials_universe):
- message = (
- f"The configured universe domain ({client_universe}) does not match the universe domain "
- f"found in the credentials ({credentials_universe}). "
- "If you haven't configured the universe domain explicitly, "
- f"`{DEFAULT_UNIVERSE}` is the default."
- )
- super().__init__(message)
-
-
-def determine_domain(
- client_universe_domain: Optional[str], universe_domain_env: Optional[str]
-) -> str:
- """Return the universe domain used by the client.
-
- Args:
- client_universe_domain (Optional[str]): The universe domain configured via the client options.
- universe_domain_env (Optional[str]): The universe domain configured via the
- "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
-
- Returns:
- str: The universe domain to be used by the client.
-
- Raises:
- ValueError: If the universe domain is an empty string.
- """
- universe_domain = DEFAULT_UNIVERSE
- if client_universe_domain is not None:
- universe_domain = client_universe_domain
- elif universe_domain_env is not None:
- universe_domain = universe_domain_env
- if len(universe_domain.strip()) == 0:
- raise EmptyUniverseError
- return universe_domain
-
-
-def compare_domains(client_universe: str, credentials: Any) -> bool:
- """Returns True iff the universe domains used by the client and credentials match.
-
- Args:
- client_universe (str): The universe domain configured via the client options.
- credentials Any: The credentials being used in the client.
-
- Returns:
- bool: True iff client_universe matches the universe in credentials.
-
- Raises:
- ValueError: when client_universe does not match the universe in credentials.
- """
- credentials_universe = getattr(credentials, "universe_domain", DEFAULT_UNIVERSE)
-
- if client_universe != credentials_universe:
- raise UniverseMismatchError(client_universe, credentials_universe)
- return True
diff --git a/google/api_core/version.py b/google/api_core/version.py
deleted file mode 100644
index 7e23d8c..0000000
--- a/google/api_core/version.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-__version__ = "2.29.1"
diff --git a/google/api_core/version_header.py b/google/api_core/version_header.py
deleted file mode 100644
index cf1972a..0000000
--- a/google/api_core/version_header.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-API_VERSION_METADATA_KEY = "x-goog-api-version"
-
-
-def to_api_version_header(version_identifier):
- """Returns data for the API Version header for the given `version_identifier`.
-
- Args:
- version_identifier (str): The version identifier to be used in the
- tuple returned.
-
- Returns:
- Tuple(str, str): A tuple containing the API Version metadata key and
- value.
- """
- return (API_VERSION_METADATA_KEY, version_identifier)
diff --git a/noxfile.py b/noxfile.py
deleted file mode 100644
index aed8c50..0000000
--- a/noxfile.py
+++ /dev/null
@@ -1,379 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Helpful notes for local usage:
-# unset PYENV_VERSION
-# pyenv local 3.14.1 3.13.10 3.12.11 3.11.4 3.10.12 3.9.17
-# PIP_INDEX_URL=https://pypi.org/simple nox
-
-from __future__ import absolute_import
-import os
-import pathlib
-import re
-import shutil
-import unittest
-
-# https://github.com/google/importlab/issues/25
-import nox
-
-
-BLACK_VERSION = "black==23.7.0"
-BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
-BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
-
-PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
-
-DEFAULT_PYTHON_VERSION = "3.14"
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-
-# Error if a python version is missing
-nox.options.error_on_missing_interpreters = True
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def lint(session):
- """Run linters.
-
- Returns a failure if the linters find linting errors or sufficiently
- serious code quality issues.
- """
- session.install("flake8", BLACK_VERSION)
- session.install(".")
- session.run(
- "black",
- "--check",
- *BLACK_EXCLUDES,
- *BLACK_PATHS,
- )
- session.run("flake8", "google", "tests")
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def blacken(session):
- """Run black.
-
- Format code to uniform standard.
- """
- session.install(BLACK_VERSION)
- session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS)
-
-
-def install_prerelease_dependencies(session, constraints_path):
- with open(constraints_path, encoding="utf-8") as constraints_file:
- constraints_text = constraints_file.read()
- # Ignore leading whitespace and comment lines.
- constraints_deps = [
- match.group(1)
- for match in re.finditer(
- r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
- )
- ]
- if constraints_deps:
- session.install(*constraints_deps)
-
- prerel_deps = [
- "google-auth",
- "googleapis-common-protos",
- "grpcio",
- "grpcio-status",
- "proto-plus",
- "protobuf",
- ]
-
- for dep in prerel_deps:
- session.install("--pre", "--no-deps", "--upgrade", dep)
-
- # Remaining dependencies
- other_deps = [
- "requests",
- "pyasn1",
- "cryptography",
- "cachetools",
- ]
- session.install(*other_deps)
-
-
-def default(session, install_grpc=True, prerelease=False, install_async_rest=False):
- """Default unit test session.
-
- This is intended to be run **without** an interpreter set, so
- that the current ``python`` (on the ``PATH``) or the version of
- Python corresponding to the ``nox`` binary can run the tests.
- """
- if prerelease and not install_grpc:
- unittest.skip("The pre-release session cannot be run without grpc")
-
- session.install(
- "pytest",
- "pytest-cov",
- "pytest-mock",
- "pytest-xdist",
- )
-
- install_extras = []
- if install_grpc:
- # Note: The extra is called `grpc` and not `grpcio`.
- install_extras.append("grpc")
-
- constraints_dir = str(CURRENT_DIRECTORY / "testing")
- if install_async_rest:
- install_extras.append("async_rest")
- constraints_type = "async-rest-"
- else:
- constraints_type = ""
-
- lib_with_extras = f".[{','.join(install_extras)}]" if len(install_extras) else "."
- if prerelease:
- install_prerelease_dependencies(
- session,
- f"{constraints_dir}/constraints-{constraints_type}{PYTHON_VERSIONS[0]}.txt",
- )
- # This *must* be the last install command to get the package from source.
- session.install("-e", lib_with_extras, "--no-deps")
- else:
- constraints_file = (
- f"{constraints_dir}/constraints-{constraints_type}{session.python}.txt"
- )
- # fall back to standard constraints file
- if not pathlib.Path(constraints_file).exists():
- constraints_file = f"{constraints_dir}/constraints-{session.python}.txt"
-
- session.install(
- "-e",
- lib_with_extras,
- "-c",
- constraints_file,
- )
-
- # Print out package versions of dependencies
- session.run(
- "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
- )
- # Support for proto.version was added in v1.23.0
- # https://github.com/googleapis/proto-plus-python/releases/tag/v1.23.0
- session.run(
- "python",
- "-c",
- """import proto; hasattr(proto, "version") and print(proto.version.__version__)""",
- )
- if install_grpc:
- session.run("python", "-c", "import grpc; print(grpc.__version__)")
- session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-
- pytest_args = [
- "python",
- "-m",
- "pytest",
- *(
- # Helpful for running a single test or testfile.
- session.posargs
- or [
- "--quiet",
- "--cov=google.api_core",
- "--cov=tests.unit",
- "--cov-append",
- "--cov-config=.coveragerc",
- "--cov-report=",
- "--cov-fail-under=0",
- # Running individual tests with parallelism enabled is usually not helpful.
- "-n=auto",
- os.path.join("tests", "unit"),
- ]
- ),
- ]
-
- session.install("asyncmock", "pytest-asyncio")
-
- # Having positional arguments means the user wants to run specific tests.
- # Best not to add additional tests to that list.
- if not session.posargs:
- pytest_args.append("--cov=tests.asyncio")
- pytest_args.append(os.path.join("tests", "asyncio"))
-
- session.run(*pytest_args)
-
-
-@nox.session(python=PYTHON_VERSIONS)
-@nox.parametrize(
- ["install_grpc", "install_async_rest", "python_versions", "legacy_proto"],
- [
- (True, False, None, None), # Run unit tests with grpcio installed
- (False, False, None, None), # Run unit tests without grpcio installed
- (
- True,
- True,
- None,
- None,
- ), # Run unit tests with grpcio and async rest installed
- # TODO: Remove once we stop support for protobuf 4.x.
- (
- True,
- False,
- ["3.9", "3.10", "3.11"],
- 4,
- ), # Run proto4 tests with grpcio/grpcio-gcp installed
- ],
-)
-def unit(
- session, install_grpc, install_async_rest, python_versions=None, legacy_proto=None
-):
- """Run the unit test suite with the given configuration parameters.
-
- If `python_versions` is provided, the test suite only runs when the Python version (xx.yy) is
- one of the values in `python_versions`.
-
- If `legacy_proto` is provided, this test suite will explicitly install the proto library at
- that major version. Only a few values are supported at any one time; the intent is to test
- deprecated but noyet abandoned versions.
- """
-
- if python_versions and session.python not in python_versions:
- session.log(f"Skipping session for Python {session.python}")
- session.skip()
-
- # TODO: consider converting the following into a `match` statement once
- # we drop Python 3.9 support.
- if legacy_proto:
- if legacy_proto == 4:
- # Pin protobuf to a 4.x version to ensure coverage for the legacy code path.
- session.install("protobuf>=4.25.8,<5.0.0")
- else:
- assert False, f"Unknown legacy_proto: {legacy_proto}"
-
- default(
- session=session,
- install_grpc=install_grpc,
- install_async_rest=install_async_rest,
- )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def prerelease_deps(session):
- """Run the unit test suite."""
- default(session, prerelease=True)
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def lint_setup_py(session):
- """Verify that setup.py is valid (including RST check)."""
-
- session.install("docutils", "Pygments", "setuptools")
- session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def mypy(session):
- """Run type-checking."""
- session.install(".[grpc,async_rest]", "mypy")
- session.install(
- "types-setuptools",
- "types-requests",
- "types-protobuf",
- )
- session.run("mypy", "google", "tests")
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
- """Run the final coverage report.
-
- This outputs the coverage report aggregating coverage from the unit
- test runs (not system test runs), and then erases coverage data.
- """
- session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=100")
- session.run("coverage", "erase")
-
-
-@nox.session(python="3.10")
-def docs(session):
- """Build the docs for this library."""
-
- session.install("-e", ".[grpc]")
- session.install(
- # We need to pin to specific versions of the `sphinxcontrib-*` packages
- # which still support sphinx 4.x.
- # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
- # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
- "sphinxcontrib-applehelp==1.0.4",
- "sphinxcontrib-devhelp==1.0.2",
- "sphinxcontrib-htmlhelp==2.0.1",
- "sphinxcontrib-qthelp==1.0.3",
- "sphinxcontrib-serializinghtml==1.1.5",
- "sphinx==4.5.0",
- "alabaster",
- "recommonmark",
- )
-
- shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
- session.run(
- "sphinx-build",
- "-W", # warnings as errors
- "-T", # show full traceback on exception
- "-N", # no colors
- "-b",
- "html",
- "-d",
- os.path.join("docs", "_build", "doctrees", ""),
- os.path.join("docs", ""),
- os.path.join("docs", "_build", "html", ""),
- )
-
-
-@nox.session(python="3.10")
-def docfx(session):
- """Build the docfx yaml files for this library."""
-
- session.install("-e", ".")
- session.install(
- # We need to pin to specific versions of the `sphinxcontrib-*` packages
- # which still support sphinx 4.x.
- # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
- # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
- "sphinxcontrib-applehelp==1.0.4",
- "sphinxcontrib-devhelp==1.0.2",
- "sphinxcontrib-htmlhelp==2.0.1",
- "sphinxcontrib-qthelp==1.0.3",
- "sphinxcontrib-serializinghtml==1.1.5",
- "gcp-sphinx-docfx-yaml",
- "alabaster",
- "recommonmark",
- )
-
- shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
- session.run(
- "sphinx-build",
- "-T", # show full traceback on exception
- "-N", # no colors
- "-D",
- (
- "extensions=sphinx.ext.autodoc,"
- "sphinx.ext.autosummary,"
- "docfx_yaml.extension,"
- "sphinx.ext.intersphinx,"
- "sphinx.ext.coverage,"
- "sphinx.ext.napoleon,"
- "sphinx.ext.todo,"
- "sphinx.ext.viewcode,"
- "recommonmark"
- ),
- "-b",
- "html",
- "-d",
- os.path.join("docs", "_build", "doctrees", ""),
- os.path.join("docs", ""),
- os.path.join("docs", "_build", "html", ""),
- )
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index fba7719..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-[build-system]
-requires = ["setuptools"]
-build-backend = "setuptools.build_meta"
-
-[project]
-name = "google-api-core"
-authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }]
-license = { text = "Apache 2.0" }
-requires-python = ">=3.9"
-readme = "README.rst"
-description = "Google API client core library"
-classifiers = [
- # Should be one of:
- # "Development Status :: 3 - Alpha"
- # "Development Status :: 4 - Beta"
- # "Development Status :: 5 - Production/Stable"
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
-
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
- "Programming Language :: Python :: 3.13",
- "Programming Language :: Python :: 3.14",
- "Operating System :: OS Independent",
- "Topic :: Internet",
-]
-dependencies = [
- "googleapis-common-protos >= 1.56.3, < 2.0.0",
- "protobuf >= 4.25.8, < 7.0.0",
- "proto-plus >= 1.22.3, < 2.0.0",
- "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'",
- "google-auth >= 2.14.1, < 3.0.0",
- "requests >= 2.20.0, < 3.0.0",
-]
-dynamic = ["version"]
-
-[project.urls]
-Documentation = "https://googleapis.dev/python/google-api-core/latest/"
-Repository = "https://github.com/googleapis/python-api-core"
-
-[project.optional-dependencies]
-async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.0"]
-grpc = [
- "grpcio >= 1.33.2, < 2.0.0",
- "grpcio >= 1.49.1, < 2.0.0; python_version >= '3.11'",
- "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'",
- "grpcio-status >= 1.33.2, < 2.0.0",
- "grpcio-status >= 1.49.1, < 2.0.0; python_version >= '3.11'",
- "grpcio-status >= 1.75.1, < 2.0.0; python_version >= '3.14'",
-]
-
-
-[tool.setuptools.dynamic]
-version = { attr = "google.api_core.version.__version__" }
-
-[tool.setuptools.packages.find]
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
-include = ["google*"]
-
-[tool.mypy]
-python_version = "3.14"
-namespace_packages = true
-ignore_missing_imports = true
-
-[tool.pytest]
-filterwarnings = [
- # treat all warnings as errors
- "error",
- # Prevent Python version warnings from interfering with tests
- "ignore:.* Python version .*:FutureWarning",
- # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed
- "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning",
- # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
- "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning",
-
-
- # Remove once the minimum supported version of googleapis-common-protos is 1.62.0
- "ignore:.*pkg_resources.declare_namespace:DeprecationWarning",
- "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning",
- # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published)
- "ignore:There is no current event loop:DeprecationWarning",
-
-]
diff --git a/renovate.json b/renovate.json
deleted file mode 100644
index c7875c4..0000000
--- a/renovate.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "extends": [
- "config:base",
- "group:all",
- ":preserveSemverRanges",
- ":disableDependencyDashboard"
- ],
- "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
- "pip_requirements": {
- "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
- }
-}
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
deleted file mode 100755
index 120b0dd..0000000
--- a/scripts/decrypt-secrets.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-# Copyright 2024 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-ROOT=$( dirname "$DIR" )
-
-# Work from the project root.
-cd $ROOT
-
-# Prevent it from overriding files.
-# We recommend that sample authors use their own service account files and cloud project.
-# In that case, they are supposed to prepare these files by themselves.
-if [[ -f "testing/test-env.sh" ]] || \
- [[ -f "testing/service-account.json" ]] || \
- [[ -f "testing/client-secrets.json" ]]; then
- echo "One or more target files exist, aborting."
- exit 1
-fi
-
-# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
-PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
-
-gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
- --project="${PROJECT_ID}" \
- > testing/test-env.sh
-gcloud secrets versions access latest \
- --secret="python-docs-samples-service-account" \
- --project="${PROJECT_ID}" \
- > testing/service-account.json
-gcloud secrets versions access latest \
- --secret="python-docs-samples-client-secrets" \
- --project="${PROJECT_ID}" \
- > testing/client-secrets.json
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
deleted file mode 100644
index 8f5e248..0000000
--- a/scripts/readme-gen/readme_gen.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Generates READMEs using configuration defined in yaml."""
-
-import argparse
-import io
-import os
-import subprocess
-
-import jinja2
-import yaml
-
-
-jinja_env = jinja2.Environment(
- trim_blocks=True,
- loader=jinja2.FileSystemLoader(
- os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
- ),
- autoescape=True,
-)
-
-README_TMPL = jinja_env.get_template("README.tmpl.rst")
-
-
-def get_help(file):
- return subprocess.check_output(["python", file, "--help"]).decode()
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("source")
- parser.add_argument("--destination", default="README.rst")
-
- args = parser.parse_args()
-
- source = os.path.abspath(args.source)
- root = os.path.dirname(source)
- destination = os.path.join(root, args.destination)
-
- jinja_env.globals["get_help"] = get_help
-
- with io.open(source, "r") as f:
- config = yaml.load(f)
-
- # This allows get_help to execute in the right directory.
- os.chdir(root)
-
- output = README_TMPL.render(config)
-
- with io.open(destination, "w") as f:
- f.write(output)
-
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
deleted file mode 100644
index 4fd2397..0000000
--- a/scripts/readme-gen/templates/README.tmpl.rst
+++ /dev/null
@@ -1,87 +0,0 @@
-{# The following line is a lie. BUT! Once jinja2 is done with it, it will
- become truth! #}
-.. This file is automatically generated. Do not edit this file directly.
-
-{{product.name}} Python Samples
-===============================================================================
-
-.. image:: https://gstatic.com/cloudssh/images/open-btn.png
- :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
-
-
-This directory contains samples for {{product.name}}. {{product.description}}
-
-{{description}}
-
-.. _{{product.name}}: {{product.url}}
-
-{% if required_api_url %}
-To run the sample, you need to enable the API at: {{required_api_url}}
-{% endif %}
-
-{% if required_role %}
-To run the sample, you need to have `{{required_role}}` role.
-{% endif %}
-
-{{other_required_steps}}
-
-{% if setup %}
-Setup
--------------------------------------------------------------------------------
-
-{% for section in setup %}
-
-{% include section + '.tmpl.rst' %}
-
-{% endfor %}
-{% endif %}
-
-{% if samples %}
-Samples
--------------------------------------------------------------------------------
-
-{% for sample in samples %}
-{{sample.name}}
-+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-
-{% if not sample.hide_cloudshell_button %}
-.. image:: https://gstatic.com/cloudssh/images/open-btn.png
- :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
-{% endif %}
-
-
-{{sample.description}}
-
-To run this sample:
-
-.. code-block:: bash
-
- $ python {{sample.file}}
-{% if sample.show_help %}
-
- {{get_help(sample.file)|indent}}
-{% endif %}
-
-
-{% endfor %}
-{% endif %}
-
-{% if cloud_client_library %}
-
-The client library
--------------------------------------------------------------------------------
-
-This sample uses the `Google Cloud Client Library for Python`_.
-You can read the documentation for more details on API usage and use GitHub
-to `browse the source`_ and `report issues`_.
-
-.. _Google Cloud Client Library for Python:
- https://googlecloudplatform.github.io/google-cloud-python/
-.. _browse the source:
- https://github.com/GoogleCloudPlatform/google-cloud-python
-.. _report issues:
- https://github.com/GoogleCloudPlatform/google-cloud-python/issues
-
-{% endif %}
-
-.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
deleted file mode 100644
index 1446b94..0000000
--- a/scripts/readme-gen/templates/auth.tmpl.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Authentication
-++++++++++++++
-
-This sample requires you to have authentication setup. Refer to the
-`Authentication Getting Started Guide`_ for instructions on setting up
-credentials for applications.
-
-.. _Authentication Getting Started Guide:
- https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
deleted file mode 100644
index 11957ce..0000000
--- a/scripts/readme-gen/templates/auth_api_key.tmpl.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-Authentication
-++++++++++++++
-
-Authentication for this service is done via an `API Key`_. To obtain an API
-Key:
-
-1. Open the `Cloud Platform Console`_
-2. Make sure that billing is enabled for your project.
-3. From the **Credentials** page, create a new **API Key** or use an existing
- one for your project.
-
-.. _API Key:
- https://developers.google.com/api-client-library/python/guide/aaa_apikeys
-.. _Cloud Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
deleted file mode 100644
index f21db80..0000000
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ /dev/null
@@ -1,29 +0,0 @@
-Install Dependencies
-++++++++++++++++++++
-
-#. Clone python-docs-samples and change directory to the sample directory you want to use.
-
- .. code-block:: bash
-
- $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
-
-#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
-
- .. _Python Development Environment Setup Guide:
- https://cloud.google.com/python/setup
-
-#. Create a virtualenv. Samples are compatible with Python 3.9+.
-
- .. code-block:: bash
-
- $ virtualenv env
- $ source env/bin/activate
-
-#. Install the dependencies needed to run the samples.
-
- .. code-block:: bash
-
- $ pip install -r requirements.txt
-
-.. _pip: https://pip.pypa.io/
-.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
deleted file mode 100644
index 5ea33d1..0000000
--- a/scripts/readme-gen/templates/install_portaudio.tmpl.rst
+++ /dev/null
@@ -1,35 +0,0 @@
-Install PortAudio
-+++++++++++++++++
-
-Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
-audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
-platform.
-
-* For Mac OS X, you can use `Homebrew`_::
-
- brew install portaudio
-
- **Note**: if you encounter an error when running `pip install` that indicates
- it can't find `portaudio.h`, try running `pip install` with the following
- flags::
-
- pip install --global-option='build_ext' \
- --global-option='-I/usr/local/include' \
- --global-option='-L/usr/local/lib' \
- pyaudio
-
-* For Debian / Ubuntu Linux::
-
- apt-get install portaudio19-dev python-all-dev
-
-* Windows may work without having to install PortAudio explicitly (it will get
- installed with PyAudio).
-
-For more details, see the `PyAudio installation`_ page.
-
-
-.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
-.. _PortAudio: http://www.portaudio.com/
-.. _PyAudio installation:
- https://people.csail.mit.edu/hubert/pyaudio/#downloads
-.. _Homebrew: http://brew.sh
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index e69de29..0000000
--- a/setup.cfg
+++ /dev/null
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 168877f..0000000
--- a/setup.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import setuptools
-
-
-setuptools.setup()
diff --git a/testing/.gitignore b/testing/.gitignore
deleted file mode 100644
index b05fbd6..0000000
--- a/testing/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-test-env.sh
-service-account.json
-client-secrets.json
\ No newline at end of file
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
deleted file mode 100644
index e69de29..0000000
--- a/testing/constraints-3.10.txt
+++ /dev/null
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
deleted file mode 100644
index e69de29..0000000
--- a/testing/constraints-3.11.txt
+++ /dev/null
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
deleted file mode 100644
index e69de29..0000000
--- a/testing/constraints-3.12.txt
+++ /dev/null
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
deleted file mode 100644
index e69de29..0000000
--- a/testing/constraints-3.13.txt
+++ /dev/null
diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt
deleted file mode 100644
index e69de29..0000000
--- a/testing/constraints-3.14.txt
+++ /dev/null
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
deleted file mode 100644
index 5429893..0000000
--- a/testing/constraints-3.9.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-# This constraints file is used to check that lower bounds
-# are correct in pyproject.toml
-# List *all* library dependencies and extras in this file.
-# Pin the version to the lower bound.
-#
-# e.g., if pyproject.toml has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-googleapis-common-protos==1.56.3
-protobuf==4.25.8
-google-auth==2.14.1
-requests==2.20.0
-grpcio==1.33.2
-grpcio-status==1.33.2
-proto-plus==1.22.3
diff --git a/testing/constraints-async-rest-3.9.txt b/testing/constraints-async-rest-3.9.txt
deleted file mode 100644
index e18d8ad..0000000
--- a/testing/constraints-async-rest-3.9.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# This constraints file is used to check that lower bounds
-# are correct in pyproject.toml
-# List *all* library dependencies and extras in this file.
-# Pin the version to the lower bound.
-#
-# e.g., if pyproject.toml has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-googleapis-common-protos==1.56.3
-protobuf==4.25.8
-google-auth==2.35.0
-requests==2.20.0
-grpcio==1.33.2
-grpcio-status==1.33.2
-proto-plus==1.22.3
-# Some tests import aiohttp to test async rest
-# from google-auth[aiohttp]
-# google-auth[aiohttp] v2.39.0 is the first release that requires aiohttp 3.6.2
-# see - change: https://github.com/googleapis/google-auth-library-python/blame/9d5c0d81e8e69719a2b4cd034bf1ed5d128fdf0a/setup.py#L35
-# - release: https://github.com/googleapis/google-auth-library-python/commits/main/?after=9d5c0d81e8e69719a2b4cd034bf1ed5d128fdf0a+104
-aiohttp==3.6.2
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/__init__.py
+++ /dev/null
diff --git a/tests/asyncio/__init__.py b/tests/asyncio/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/asyncio/__init__.py
+++ /dev/null
diff --git a/tests/asyncio/future/__init__.py b/tests/asyncio/future/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/asyncio/future/__init__.py
+++ /dev/null
diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py
deleted file mode 100644
index 659f41c..0000000
--- a/tests/asyncio/future/test_async_future.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-from unittest import mock
-
-import pytest
-
-from google.api_core import exceptions
-from google.api_core.future import async_future
-
-
-class AsyncFuture(async_future.AsyncFuture):
- async def done(self):
- return False
-
- async def cancel(self):
- return True
-
- async def cancelled(self):
- return False
-
- async def running(self):
- return True
-
-
-@pytest.mark.asyncio
-async def test_polling_future_constructor():
- future = AsyncFuture()
- assert not await future.done()
- assert not await future.cancelled()
- assert await future.running()
- assert await future.cancel()
-
-
-@pytest.mark.asyncio
-async def test_set_result():
- future = AsyncFuture()
-
- future.set_result(1)
-
- assert await future.result() == 1
- callback_called = asyncio.Event()
-
- def callback(unused_future):
- callback_called.set()
-
- future.add_done_callback(callback)
- await callback_called.wait()
-
-
-@pytest.mark.asyncio
-async def test_set_exception():
- future = AsyncFuture()
- exception = ValueError("meep")
-
- future.set_exception(exception)
-
- assert await future.exception() == exception
- with pytest.raises(ValueError):
- await future.result()
-
- callback_called = asyncio.Event()
-
- def callback(unused_future):
- callback_called.set()
-
- future.add_done_callback(callback)
- await callback_called.wait()
-
-
-@pytest.mark.asyncio
-async def test_invoke_callback_exception():
- future = AsyncFuture()
- future.set_result(42)
-
- # This should not raise, despite the callback causing an exception.
- callback_called = asyncio.Event()
-
- def callback(unused_future):
- callback_called.set()
- raise ValueError()
-
- future.add_done_callback(callback)
- await callback_called.wait()
-
-
-class AsyncFutureWithPoll(AsyncFuture):
- def __init__(self):
- super().__init__()
- self.poll_count = 0
- self.event = asyncio.Event()
-
- async def done(self):
- self.poll_count += 1
- await self.event.wait()
- self.set_result(42)
- return True
-
-
-@pytest.mark.asyncio
-async def test_result_with_polling():
- future = AsyncFutureWithPoll()
-
- future.event.set()
- result = await future.result()
-
- assert result == 42
- assert future.poll_count == 1
- # Repeated calls should not cause additional polling
- assert await future.result() == result
- assert future.poll_count == 1
-
-
-class AsyncFutureTimeout(AsyncFutureWithPoll):
- async def done(self):
- await asyncio.sleep(0.2)
- return False
-
-
-@pytest.mark.asyncio
-async def test_result_timeout():
- future = AsyncFutureTimeout()
- with pytest.raises(asyncio.TimeoutError):
- await future.result(timeout=0.2)
-
-
-@pytest.mark.asyncio
-async def test_exception_timeout():
- future = AsyncFutureTimeout()
- with pytest.raises(asyncio.TimeoutError):
- await future.exception(timeout=0.2)
-
-
-@pytest.mark.asyncio
-async def test_result_timeout_with_retry():
- future = AsyncFutureTimeout()
- with pytest.raises(asyncio.TimeoutError):
- await future.exception(timeout=0.4)
-
-
-class AsyncFutureTransient(AsyncFutureWithPoll):
- def __init__(self, errors):
- super().__init__()
- self._errors = errors
-
- async def done(self):
- if self._errors:
- error, self._errors = self._errors[0], self._errors[1:]
- raise error("testing")
- self.poll_count += 1
- self.set_result(42)
- return True
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@pytest.mark.asyncio
-async def test_result_transient_error(unused_sleep):
- future = AsyncFutureTransient(
- (
- exceptions.TooManyRequests,
- exceptions.InternalServerError,
- exceptions.BadGateway,
- )
- )
- result = await future.result()
- assert result == 42
- assert future.poll_count == 1
- # Repeated calls should not cause additional polling
- assert await future.result() == result
- assert future.poll_count == 1
-
-
-@pytest.mark.asyncio
-async def test_callback_concurrency():
- future = AsyncFutureWithPoll()
-
- callback_called = asyncio.Event()
-
- def callback(unused_future):
- callback_called.set()
-
- future.add_done_callback(callback)
-
- # Give the thread a second to poll
- await asyncio.sleep(1)
- assert future.poll_count == 1
-
- future.event.set()
- await callback_called.wait()
-
-
-@pytest.mark.asyncio
-async def test_double_callback_concurrency():
- future = AsyncFutureWithPoll()
-
- callback_called = asyncio.Event()
-
- def callback(unused_future):
- callback_called.set()
-
- callback_called2 = asyncio.Event()
-
- def callback2(unused_future):
- callback_called2.set()
-
- future.add_done_callback(callback)
- future.add_done_callback(callback2)
-
- # Give the thread a second to poll
- await asyncio.sleep(1)
- future.event.set()
-
- assert future.poll_count == 1
- await callback_called.wait()
- await callback_called2.wait()
diff --git a/tests/asyncio/gapic/test_config_async.py b/tests/asyncio/gapic/test_config_async.py
deleted file mode 100644
index dbb05d5..0000000
--- a/tests/asyncio/gapic/test_config_async.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core.gapic_v1 import config_async
-
-
-INTERFACE_CONFIG = {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "other": ["FAILED_PRECONDITION"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 2.5,
- "max_retry_delay_millis": 120000,
- "initial_rpc_timeout_millis": 120000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 120000,
- "total_timeout_millis": 600000,
- },
- "other": {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 1,
- "max_retry_delay_millis": 1000,
- "initial_rpc_timeout_millis": 1000,
- "rpc_timeout_multiplier": 1,
- "max_rpc_timeout_millis": 1000,
- "total_timeout_millis": 1000,
- },
- },
- "methods": {
- "AnnotateVideo": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "Other": {
- "timeout_millis": 60000,
- "retry_codes_name": "other",
- "retry_params_name": "other",
- },
- "Plain": {"timeout_millis": 30000},
- },
-}
-
-
-def test_create_method_configs():
- method_configs = config_async.parse_method_configs(INTERFACE_CONFIG)
-
- retry, timeout = method_configs["AnnotateVideo"]
- assert retry._predicate(exceptions.DeadlineExceeded(None))
- assert retry._predicate(exceptions.ServiceUnavailable(None))
- assert retry._initial == 1.0
- assert retry._multiplier == 2.5
- assert retry._maximum == 120.0
- assert retry._deadline == 600.0
- assert timeout._initial == 120.0
- assert timeout._multiplier == 1.0
- assert timeout._maximum == 120.0
-
- retry, timeout = method_configs["Other"]
- assert retry._predicate(exceptions.FailedPrecondition(None))
- assert retry._initial == 1.0
- assert retry._multiplier == 1.0
- assert retry._maximum == 1.0
- assert retry._deadline == 1.0
- assert timeout._initial == 1.0
- assert timeout._multiplier == 1.0
- assert timeout._maximum == 1.0
-
- retry, timeout = method_configs["Plain"]
- assert retry is None
- assert timeout._timeout == 30.0
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
deleted file mode 100644
index 40dd168..0000000
--- a/tests/asyncio/gapic/test_method_async.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-import pytest
-
-try:
- from grpc import aio, Compression
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import retry_async
-from google.api_core import timeout
-
-
-def _utcnow_monotonic():
- current_time = datetime.datetime.min
- delta = datetime.timedelta(seconds=0.5)
- while True:
- yield current_time
- current_time += delta
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_basic():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method)
-
- result = await wrapped_method(1, 2, meep="moop")
-
- assert result == 42
- method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
-
- # Check that the default client info was specified in the metadata.
- metadata = method.call_args[1]["metadata"]
- assert len(metadata) == 1
- client_info = gapic_v1.client_info.DEFAULT_CLIENT_INFO
- user_agent_metadata = client_info.to_grpc_metadata()
- assert user_agent_metadata in metadata
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_with_no_client_info():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=None)
-
- await wrapped_method(1, 2, meep="moop")
-
- method.assert_called_once_with(1, 2, meep="moop")
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_with_custom_client_info():
- client_info = gapic_v1.client_info.ClientInfo(
- python_version=1,
- grpc_version=2,
- api_core_version=3,
- gapic_version=4,
- client_library_version=5,
- protobuf_runtime_version=6,
- )
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=client_info)
-
- await wrapped_method(1, 2, meep="moop")
-
- method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
-
- # Check that the custom client info was specified in the metadata.
- metadata = method.call_args[1]["metadata"]
- assert client_info.to_grpc_metadata() in metadata
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_with_no_compression():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method)
-
- await wrapped_method(1, 2, meep="moop", compression=None)
-
- method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_with_custom_compression():
- compression = Compression.Gzip
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_compression=compression
- )
-
- await wrapped_method(1, 2, meep="moop", compression=Compression.Deflate)
-
- method.assert_called_once_with(
- 1, 2, meep="moop", metadata=mock.ANY, compression=Compression.Deflate
- )
-
-
-@pytest.mark.asyncio
-async def test_invoke_wrapped_method_with_metadata():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method)
-
- await wrapped_method(mock.sentinel.request, metadata=[("a", "b")])
-
- method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
- metadata = method.call_args[1]["metadata"]
- # Metadata should have two items: the client info metadata and our custom
- # metadata.
- assert len(metadata) == 2
- assert ("a", "b") in metadata
-
-
-@pytest.mark.asyncio
-async def test_invoke_wrapped_method_with_metadata_as_none():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
-
- wrapped_method = gapic_v1.method_async.wrap_method(method)
-
- await wrapped_method(mock.sentinel.request, metadata=None)
-
- method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
- metadata = method.call_args[1]["metadata"]
- # Metadata should have just one items: the client info metadata.
- assert len(metadata) == 1
-
-
-@mock.patch("asyncio.sleep")
-@pytest.mark.asyncio
-async def test_wrap_method_with_default_retry_timeout_and_compression(unused_sleep):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(
- spec=aio.UnaryUnaryMultiCallable,
- side_effect=[exceptions.InternalServerError(None), fake_call],
- )
-
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = Compression.Gzip
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = await wrapped_method()
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=60, compression=default_compression, metadata=mock.ANY
- )
-
-
-@mock.patch("asyncio.sleep")
-@pytest.mark.asyncio
-async def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(
- spec=aio.UnaryUnaryMultiCallable,
- side_effect=[exceptions.InternalServerError(None), fake_call],
- )
-
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = Compression.Gzip
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = await wrapped_method(
- retry=gapic_v1.method_async.DEFAULT,
- timeout=gapic_v1.method_async.DEFAULT,
- compression=gapic_v1.method_async.DEFAULT,
- )
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=60, compression=Compression.Gzip, metadata=mock.ANY
- )
-
-
-@mock.patch("asyncio.sleep")
-@pytest.mark.asyncio
-async def test_wrap_method_with_overriding_retry_timeout_and_compression(unused_sleep):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(
- spec=aio.UnaryUnaryMultiCallable,
- side_effect=[exceptions.NotFound(None), fake_call],
- )
-
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = Compression.Gzip
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = await wrapped_method(
- retry=retry_async.AsyncRetry(
- retry_async.if_exception_type(exceptions.NotFound)
- ),
- timeout=timeout.ConstantTimeout(22),
- compression=Compression.Deflate,
- )
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=22, compression=Compression.Deflate, metadata=mock.ANY
- )
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_with_overriding_timeout_as_a_number():
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ConstantTimeout(60)
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
- )
-
- result = await wrapped_method(timeout=22)
-
- assert result == 42
-
- actual_timeout = method.call_args[1]["timeout"]
- metadata = method.call_args[1]["metadata"]
- assert metadata == mock.ANY
- assert actual_timeout == pytest.approx(22, abs=0.05)
-
-
-@pytest.mark.asyncio
-async def test_wrap_method_without_wrap_errors():
- fake_call = mock.AsyncMock()
-
- wrapped_method = gapic_v1.method_async.wrap_method(fake_call, kind="rest")
- with mock.patch("google.api_core.grpc_helpers_async.wrap_errors") as method:
- await wrapped_method()
-
- method.assert_not_called()
diff --git a/tests/asyncio/operations_v1/__init__.py b/tests/asyncio/operations_v1/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/asyncio/operations_v1/__init__.py
+++ /dev/null
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
deleted file mode 100644
index e5b20dc..0000000
--- a/tests/asyncio/operations_v1/test_operations_async_client.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from unittest import mock
-
-import pytest
-
-try:
- from grpc import aio, Compression
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import grpc_helpers_async
-from google.api_core import operations_v1
-from google.api_core import page_iterator_async
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-
-
-def _mock_grpc_objects(response):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(response)
- method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
- mocked_channel = mock.Mock()
- mocked_channel.unary_unary = mock.Mock(return_value=method)
- return mocked_channel, method, fake_call
-
-
-@pytest.mark.asyncio
-async def test_get_operation():
- mocked_channel, method, fake_call = _mock_grpc_objects(
- operations_pb2.Operation(name="meep")
- )
- client = operations_v1.OperationsAsyncClient(mocked_channel)
-
- response = await client.get_operation(
- "name", metadata=[("header", "foo")], compression=Compression.Gzip
- )
- assert method.call_count == 1
- assert tuple(method.call_args_list[0])[0][0].name == "name"
- assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
- assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
- assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
- "metadata"
- ]
- assert response == fake_call.response
-
-
-@pytest.mark.asyncio
-async def test_list_operations():
- operations = [
- operations_pb2.Operation(name="1"),
- operations_pb2.Operation(name="2"),
- ]
- list_response = operations_pb2.ListOperationsResponse(operations=operations)
-
- mocked_channel, method, fake_call = _mock_grpc_objects(list_response)
- client = operations_v1.OperationsAsyncClient(mocked_channel)
-
- pager = await client.list_operations(
- "name", "filter", metadata=[("header", "foo")], compression=Compression.Gzip
- )
-
- assert isinstance(pager, page_iterator_async.AsyncIterator)
- responses = []
- async for response in pager:
- responses.append(response)
-
- assert responses == operations
-
- assert method.call_count == 1
- assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
- assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
- assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
- "metadata"
- ]
- request = tuple(method.call_args_list[0])[0][0]
- assert isinstance(request, operations_pb2.ListOperationsRequest)
- assert request.name == "name"
- assert request.filter == "filter"
-
-
-@pytest.mark.asyncio
-async def test_delete_operation():
- mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
- client = operations_v1.OperationsAsyncClient(mocked_channel)
-
- await client.delete_operation(
- "name", metadata=[("header", "foo")], compression=Compression.Gzip
- )
-
- assert method.call_count == 1
- assert tuple(method.call_args_list[0])[0][0].name == "name"
- assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
- assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
- assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
- "metadata"
- ]
-
-
-@pytest.mark.asyncio
-async def test_cancel_operation():
- mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
- client = operations_v1.OperationsAsyncClient(mocked_channel)
-
- await client.cancel_operation(
- "name", metadata=[("header", "foo")], compression=Compression.Gzip
- )
-
- assert method.call_count == 1
- assert tuple(method.call_args_list[0])[0][0].name == "name"
- assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
- assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
- assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
- "metadata"
- ]
diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/asyncio/retry/__init__.py
+++ /dev/null
diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py
deleted file mode 100644
index e44f536..0000000
--- a/tests/asyncio/retry/test_retry_streaming_async.py
+++ /dev/null
@@ -1,601 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import datetime
-import re
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-import pytest
-
-from google.api_core import exceptions
-from google.api_core import retry_async
-from google.api_core.retry import retry_streaming_async
-
-from ...unit.retry.test_retry_base import Test_BaseRetry
-
-
-@pytest.mark.asyncio
-async def test_retry_streaming_target_bad_sleep_generator():
- from google.api_core.retry.retry_streaming_async import retry_target_stream
-
- with pytest.raises(ValueError, match="Sleep generator"):
- await retry_target_stream(None, lambda x: True, [], None).__anext__()
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@pytest.mark.asyncio
-async def test_retry_streaming_target_dynamic_backoff(sleep):
- """
- sleep_generator should be iterated after on_error, to support dynamic backoff
- """
- from functools import partial
- from google.api_core.retry.retry_streaming_async import retry_target_stream
-
- sleep.side_effect = RuntimeError("stop after sleep")
- # start with empty sleep generator; values are added after exception in push_sleep_value
- sleep_values = []
- error_target = partial(TestAsyncStreamingRetry._generator_mock, error_on=0)
- inserted_sleep = 99
-
- def push_sleep_value(err):
- sleep_values.append(inserted_sleep)
-
- with pytest.raises(RuntimeError):
- await retry_target_stream(
- error_target,
- predicate=lambda x: True,
- sleep_generator=sleep_values,
- on_error=push_sleep_value,
- ).__anext__()
- assert sleep.call_count == 1
- sleep.assert_called_once_with(inserted_sleep)
-
-
-class TestAsyncStreamingRetry(Test_BaseRetry):
- def _make_one(self, *args, **kwargs):
- return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs)
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = retry_streaming_async.AsyncStreamingRetry(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- timeout=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<AsyncStreamingRetry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
-
- @staticmethod
- async def _generator_mock(
- num=5,
- error_on=None,
- exceptions_seen=None,
- sleep_time=0,
- ):
- """
- Helper to create a mock generator that yields a number of values
- Generator can optionally raise an exception on a specific iteration
-
- Args:
- - num (int): the number of values to yield
- - error_on (int): if given, the generator will raise a ValueError on the specified iteration
- - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
- - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value
- """
- try:
- for i in range(num):
- if sleep_time:
- await asyncio.sleep(sleep_time)
- if error_on is not None and i == error_on:
- raise ValueError("generator mock error")
- yield i
- except (Exception, BaseException, GeneratorExit) as e:
- # keep track of exceptions seen by generator
- if exceptions_seen is not None:
- exceptions_seen.append(e)
- raise
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___generator_success(self, sleep):
- """
- Test that a retry-decorated generator yields values as expected
- This test checks a generator with no issues
- """
- from collections.abc import AsyncGenerator
-
- retry_ = retry_streaming_async.AsyncStreamingRetry()
- decorated = retry_(self._generator_mock)
-
- num = 10
- generator = await decorated(num)
- # check types
- assert isinstance(generator, AsyncGenerator)
- assert isinstance(self._generator_mock(num), AsyncGenerator)
- # check yield contents
- unpacked = [i async for i in generator]
- assert len(unpacked) == num
- expected = [i async for i in self._generator_mock(num)]
- for a, b in zip(unpacked, expected):
- assert a == b
- sleep.assert_not_called()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___generator_retry(self, sleep):
- """
- Tests that a retry-decorated generator will retry on errors
- """
- on_error = mock.Mock(return_value=None)
- retry_ = retry_streaming_async.AsyncStreamingRetry(
- on_error=on_error,
- predicate=retry_async.if_exception_type(ValueError),
- timeout=None,
- )
- generator = await retry_(self._generator_mock)(error_on=3)
- # error thrown on 3
- # generator should contain 0, 1, 2 looping
- unpacked = [await generator.__anext__() for i in range(10)]
- assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
- assert on_error.call_count == 3
- await generator.aclose()
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.parametrize("use_deadline_arg", [True, False])
- @pytest.mark.asyncio
- async def test___call___generator_retry_hitting_timeout(
- self, sleep, uniform, use_deadline_arg
- ):
- """
- Tests that a retry-decorated generator will throw a RetryError
- after using the time budget
- """
- import time
-
- timeout_val = 9.9
- # support "deadline" as an alias for "timeout"
- timeout_kwarg = (
- {"timeout": timeout_val}
- if not use_deadline_arg
- else {"deadline": timeout_val}
- )
-
- on_error = mock.Mock()
- retry_ = retry_streaming_async.AsyncStreamingRetry(
- predicate=retry_async.if_exception_type(ValueError),
- initial=1.0,
- maximum=1024.0,
- multiplier=2.0,
- **timeout_kwarg,
- )
-
- time_now = time.monotonic()
- now_patcher = mock.patch(
- "time.monotonic",
- return_value=time_now,
- )
-
- decorated = retry_(self._generator_mock, on_error=on_error)
- generator = await decorated(error_on=1)
-
- with now_patcher as patched_now:
- # Make sure that calls to fake asyncio.sleep() also advance the mocked
- # time clock.
- def increase_time(sleep_delay):
- patched_now.return_value += sleep_delay
-
- sleep.side_effect = increase_time
-
- with pytest.raises(exceptions.RetryError):
- [i async for i in generator]
-
- assert on_error.call_count == 4
- # check the delays
- assert sleep.call_count == 3 # once between each successive target calls
- last_wait = sleep.call_args.args[0]
- total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- # next wait would have put us over, so ended early
- assert last_wait == 4
- assert total_wait == 7
-
- @pytest.mark.asyncio
- async def test___call___generator_cancellations(self):
- """
- cancel calls should propagate to the generator
- """
- # test without cancel as retryable
- retry_ = retry_streaming_async.AsyncStreamingRetry()
- utcnow = datetime.datetime.now(datetime.timezone.utc)
- mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow)
- generator = await retry_(self._generator_mock)(sleep_time=0.2)
- assert await generator.__anext__() == 0
- task = asyncio.create_task(generator.__anext__())
- task.cancel()
- with pytest.raises(asyncio.CancelledError):
- await task
- with pytest.raises(StopAsyncIteration):
- await generator.__anext__()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_generator_send(self, sleep):
- """
- Send should be passed through retry into target generator
- """
-
- async def _mock_send_gen():
- """
- always yield whatever was sent in
- """
- in_ = yield
- while True:
- in_ = yield in_
-
- retry_ = retry_streaming_async.AsyncStreamingRetry()
-
- decorated = retry_(_mock_send_gen)
-
- generator = await decorated()
- result = await generator.__anext__()
- # first yield should be None
- assert result is None
- in_messages = ["test_1", "hello", "world"]
- out_messages = []
- for msg in in_messages:
- recv = await generator.asend(msg)
- out_messages.append(recv)
- assert in_messages == out_messages
- await generator.aclose()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___generator_send_retry(self, sleep):
- """
- Send should be retried if target generator raises an error
- """
- on_error = mock.Mock(return_value=None)
- retry_ = retry_streaming_async.AsyncStreamingRetry(
- on_error=on_error,
- predicate=retry_async.if_exception_type(ValueError),
- timeout=None,
- )
- generator = await retry_(self._generator_mock)(error_on=3)
- with pytest.raises(TypeError) as exc_info:
- await generator.asend("cannot send to fresh generator")
- assert exc_info.match("can't send non-None value")
- await generator.aclose()
-
- # error thrown on 3
- # generator should contain 0, 1, 2 looping
- generator = await retry_(self._generator_mock)(error_on=3)
- assert await generator.__anext__() == 0
- unpacked = [await generator.asend(i) for i in range(10)]
- assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
- assert on_error.call_count == 3
- await generator.aclose()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_generator_close(self, sleep):
- """
- Close should be passed through retry into target generator
- """
- retry_ = retry_streaming_async.AsyncStreamingRetry()
- decorated = retry_(self._generator_mock)
- exception_list = []
- generator = await decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- await generator.__anext__()
- await generator.aclose()
-
- assert isinstance(exception_list[0], GeneratorExit)
- with pytest.raises(StopAsyncIteration):
- # calling next on closed generator should raise error
- await generator.__anext__()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_new_generator_close(self, sleep):
- """
- Close should be passed through retry into target generator,
- even when it hasn't been iterated yet
- """
- retry_ = retry_streaming_async.AsyncStreamingRetry()
- decorated = retry_(self._generator_mock)
- exception_list = []
- generator = await decorated(10, exceptions_seen=exception_list)
- await generator.aclose()
-
- with pytest.raises(StopAsyncIteration):
- # calling next on closed generator should raise error
- await generator.__anext__()
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_generator_throw(self, sleep):
- """
- Throw should be passed through retry into target generator
- """
-
- # The generator should not retry when it encounters a non-retryable error
- retry_ = retry_streaming_async.AsyncStreamingRetry(
- predicate=retry_async.if_exception_type(ValueError),
- )
- decorated = retry_(self._generator_mock)
- exception_list = []
- generator = await decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- await generator.__anext__()
- with pytest.raises(BufferError):
- await generator.athrow(BufferError("test"))
- assert isinstance(exception_list[0], BufferError)
- with pytest.raises(StopAsyncIteration):
- # calling next on closed generator should raise error
- await generator.__anext__()
-
- # In contrast, the generator should retry if we throw a retryable exception
- exception_list = []
- generator = await decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- await generator.__anext__()
- throw_val = await generator.athrow(ValueError("test"))
- assert throw_val == 0
- assert isinstance(exception_list[0], ValueError)
- # calling next on generator should not raise error, because it was retried
- assert await generator.__anext__() == 1
-
- @pytest.mark.parametrize("awaitable_wrapped", [True, False])
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_iterable_send(self, sleep, awaitable_wrapped):
- """
- Send should work like next if the wrapped iterable does not support it
- """
- retry_ = retry_streaming_async.AsyncStreamingRetry()
-
- def iterable_fn():
- class CustomIterable:
- def __init__(self):
- self.i = -1
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- self.i += 1
- return self.i
-
- return CustomIterable()
-
- if awaitable_wrapped:
-
- async def wrapper():
- return iterable_fn()
-
- decorated = retry_(wrapper)
- else:
- decorated = retry_(iterable_fn)
-
- retryable = await decorated()
- # initiate the generator by calling next
- result = await retryable.__anext__()
- assert result == 0
- # test sending values
- assert await retryable.asend("test") == 1
- assert await retryable.asend("test2") == 2
- assert await retryable.asend("test3") == 3
- await retryable.aclose()
-
- @pytest.mark.parametrize("awaitable_wrapped", [True, False])
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_iterable_close(self, sleep, awaitable_wrapped):
- """
- close should be handled by wrapper if wrapped iterable does not support it
- """
- retry_ = retry_streaming_async.AsyncStreamingRetry()
-
- def iterable_fn():
- class CustomIterable:
- def __init__(self):
- self.i = -1
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- self.i += 1
- return self.i
-
- return CustomIterable()
-
- if awaitable_wrapped:
-
- async def wrapper():
- return iterable_fn()
-
- decorated = retry_(wrapper)
- else:
- decorated = retry_(iterable_fn)
-
- # try closing active generator
- retryable = await decorated()
- assert await retryable.__anext__() == 0
- await retryable.aclose()
- with pytest.raises(StopAsyncIteration):
- await retryable.__anext__()
- # try closing new generator
- new_retryable = await decorated()
- await new_retryable.aclose()
- with pytest.raises(StopAsyncIteration):
- await new_retryable.__anext__()
-
- @pytest.mark.parametrize("awaitable_wrapped", [True, False])
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped):
- """
- Throw should work even if the wrapped iterable does not support it
- """
-
- predicate = retry_async.if_exception_type(ValueError)
- retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate)
-
- def iterable_fn():
- class CustomIterable:
- def __init__(self):
- self.i = -1
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- self.i += 1
- return self.i
-
- return CustomIterable()
-
- if awaitable_wrapped:
-
- async def wrapper():
- return iterable_fn()
-
- decorated = retry_(wrapper)
- else:
- decorated = retry_(iterable_fn)
-
- # try throwing with active generator
- retryable = await decorated()
- assert await retryable.__anext__() == 0
- # should swallow errors in predicate
- await retryable.athrow(ValueError("test"))
- # should raise errors not in predicate
- with pytest.raises(BufferError):
- await retryable.athrow(BufferError("test"))
- with pytest.raises(StopAsyncIteration):
- await retryable.__anext__()
- # try throwing with new generator
- new_retryable = await decorated()
- with pytest.raises(BufferError):
- await new_retryable.athrow(BufferError("test"))
- with pytest.raises(StopAsyncIteration):
- await new_retryable.__anext__()
-
- @pytest.mark.asyncio
- async def test_exc_factory_non_retryable_error(self):
- """
- generator should give the option to override exception creation logic
- test when non-retryable error is thrown
- """
- from google.api_core.retry import RetryFailureReason
- from google.api_core.retry.retry_streaming_async import retry_target_stream
-
- timeout = 6
- sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
- expected_final_err = RuntimeError("done")
- expected_source_err = ZeroDivisionError("test4")
-
- def factory(*args, **kwargs):
- assert len(kwargs) == 0
- assert args[0] == sent_errors
- assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
- assert args[2] == timeout
- return expected_final_err, expected_source_err
-
- generator = retry_target_stream(
- self._generator_mock,
- retry_async.if_exception_type(ValueError),
- [0] * 3,
- timeout=timeout,
- exception_factory=factory,
- )
- # initialize the generator
- await generator.__anext__()
- # trigger some retryable errors
- await generator.athrow(sent_errors[0])
- await generator.athrow(sent_errors[1])
- # trigger a non-retryable error
- with pytest.raises(expected_final_err.__class__) as exc_info:
- await generator.athrow(sent_errors[2])
- assert exc_info.value == expected_final_err
- assert exc_info.value.__cause__ == expected_source_err
-
- @pytest.mark.asyncio
- async def test_exc_factory_timeout(self):
- """
- generator should give the option to override exception creation logic
- test when timeout is exceeded
- """
- import time
- from google.api_core.retry import RetryFailureReason
- from google.api_core.retry.retry_streaming_async import retry_target_stream
-
- timeout = 2
- time_now = time.monotonic()
- now_patcher = mock.patch(
- "time.monotonic",
- return_value=time_now,
- )
-
- with now_patcher as patched_now:
- timeout = 2
- sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
- expected_final_err = RuntimeError("done")
- expected_source_err = ZeroDivisionError("test4")
-
- def factory(*args, **kwargs):
- assert len(kwargs) == 0
- assert args[0] == sent_errors
- assert args[1] == RetryFailureReason.TIMEOUT
- assert args[2] == timeout
- return expected_final_err, expected_source_err
-
- generator = retry_target_stream(
- self._generator_mock,
- retry_async.if_exception_type(ValueError),
- [0] * 3,
- timeout=timeout,
- exception_factory=factory,
- )
- # initialize the generator
- await generator.__anext__()
- # trigger some retryable errors
- await generator.athrow(sent_errors[0])
- await generator.athrow(sent_errors[1])
- # trigger a timeout
- patched_now.return_value += timeout + 1
- with pytest.raises(expected_final_err.__class__) as exc_info:
- await generator.athrow(sent_errors[2])
- assert exc_info.value == expected_final_err
- assert exc_info.value.__cause__ == expected_source_err
diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py
deleted file mode 100644
index e7fdc96..0000000
--- a/tests/asyncio/retry/test_retry_unary_async.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import re
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-import pytest
-
-from google.api_core import exceptions
-from google.api_core import retry_async
-
-from ...unit.retry.test_retry_base import Test_BaseRetry
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-@pytest.mark.asyncio
-async def test_retry_target_success(utcnow, sleep):
- predicate = retry_async.if_exception_type(ValueError)
- call_count = [0]
-
- async def target():
- call_count[0] += 1
- if call_count[0] < 3:
- raise ValueError()
- return 42
-
- result = await retry_async.retry_target(target, predicate, range(10), None)
-
- assert result == 42
- assert call_count[0] == 3
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-@pytest.mark.asyncio
-async def test_retry_target_w_on_error(utcnow, sleep):
- predicate = retry_async.if_exception_type(ValueError)
- call_count = {"target": 0}
- to_raise = ValueError()
-
- async def target():
- call_count["target"] += 1
- if call_count["target"] < 3:
- raise to_raise
- return 42
-
- on_error = mock.Mock()
-
- result = await retry_async.retry_target(
- target, predicate, range(10), None, on_error=on_error
- )
-
- assert result == 42
- assert call_count["target"] == 3
-
- on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-@pytest.mark.asyncio
-async def test_retry_target_non_retryable_error(utcnow, sleep):
- predicate = retry_async.if_exception_type(ValueError)
- exception = TypeError()
- target = mock.Mock(side_effect=exception)
-
- with pytest.raises(TypeError) as exc_info:
- await retry_async.retry_target(target, predicate, range(10), None)
-
- assert exc_info.value == exception
- sleep.assert_not_called()
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch("time.monotonic", autospec=True)
-@pytest.mark.parametrize("use_deadline_arg", [True, False])
-@pytest.mark.asyncio
-async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
- predicate = retry_async.if_exception_type(ValueError)
- exception = ValueError("meep")
- target = mock.Mock(side_effect=exception)
- # Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the timeout.
- monotonic.side_effect = [0, 5, 11]
-
- timeout_val = 10
- # support "deadline" as an alias for "timeout"
- timeout_kwarg = (
- {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val}
- )
-
- with pytest.raises(exceptions.RetryError) as exc_info:
- await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg)
-
- assert exc_info.value.cause == exception
- assert exc_info.match("Timeout of 10.0s exceeded")
- assert exc_info.match("last exception: meep")
- assert target.call_count == 2
-
- # Ensure the exception message does not include the target fn:
- # it may be a partial with user data embedded
- assert str(target) not in exc_info.exconly()
-
-
-@pytest.mark.asyncio
-async def test_retry_target_bad_sleep_generator():
- with pytest.raises(ValueError, match="Sleep generator"):
- await retry_async.retry_target(mock.sentinel.target, lambda x: True, [], None)
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@pytest.mark.asyncio
-async def test_retry_target_dynamic_backoff(sleep):
- """
- sleep_generator should be iterated after on_error, to support dynamic backoff
- """
- sleep.side_effect = RuntimeError("stop after sleep")
- # start with empty sleep generator; values are added after exception in push_sleep_value
- sleep_values = []
- exception = ValueError("trigger retry")
- error_target = mock.Mock(side_effect=exception)
- inserted_sleep = 99
-
- def push_sleep_value(err):
- sleep_values.append(inserted_sleep)
-
- with pytest.raises(RuntimeError):
- await retry_async.retry_target(
- error_target,
- predicate=lambda x: True,
- sleep_generator=sleep_values,
- on_error=push_sleep_value,
- )
- assert sleep.call_count == 1
- sleep.assert_called_once_with(inserted_sleep)
-
-
-class TestAsyncRetry(Test_BaseRetry):
- def _make_one(self, *args, **kwargs):
- return retry_async.AsyncRetry(*args, **kwargs)
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = retry_async.AsyncRetry(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- timeout=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<AsyncRetry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___and_execute_success(self, sleep):
- retry_ = retry_async.AsyncRetry()
- target = mock.AsyncMock(spec=["__call__"], return_value=42)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target)
- target.assert_not_called()
-
- result = await decorated("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___and_execute_retry(self, sleep, uniform):
- on_error = mock.Mock(spec=["__call__"], side_effect=[None])
- retry_ = retry_async.AsyncRetry(
- predicate=retry_async.if_exception_type(ValueError)
- )
-
- target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError(), 42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- result = await decorated("meep")
-
- assert result == 42
- assert target.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_called_once_with(retry_._initial)
- assert on_error.call_count == 1
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
- on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
- retry_ = retry_async.AsyncRetry(
- predicate=retry_async.if_exception_type(ValueError),
- initial=1.0,
- maximum=1024.0,
- multiplier=2.0,
- timeout=30.9,
- )
-
- monotonic_patcher = mock.patch("time.monotonic", return_value=0)
-
- target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- with monotonic_patcher as patched_monotonic:
- # Make sure that calls to fake asyncio.sleep() also advance the mocked
- # time clock.
- def increase_time(sleep_delay):
- patched_monotonic.return_value += sleep_delay
-
- sleep.side_effect = increase_time
-
- with pytest.raises(exceptions.RetryError):
- await decorated("meep")
-
- assert target.call_count == 5
- target.assert_has_calls([mock.call("meep")] * 5)
- assert on_error.call_count == 5
-
- # check the delays
- assert sleep.call_count == 4 # once between each successive target calls
- last_wait = sleep.call_args.args[0]
- total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
-
- assert last_wait == 8.0
- # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
- # we do not even wait for it to be scheduled (30.9 is configured timeout).
- # This changes the previous logic of shortening the last attempt to fit
- # in the timeout. The previous logic was removed to make Python retry
- # logic consistent with the other languages and to not disrupt the
- # randomized retry delays distribution by artificially increasing a
- # probability of scheduling two (instead of one) last attempts with very
- # short delay between them, while the second retry having very low chance
- # of succeeding anyways.
- assert total_wait == 15.0
-
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___init___without_retry_executed(self, sleep):
- _some_function = mock.Mock()
-
- retry_ = retry_async.AsyncRetry(
- predicate=retry_async.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.AsyncMock(spec=["__call__"], side_effect=[42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
-
- result = await wrapped("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
- _some_function.assert_not_called()
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("asyncio.sleep", autospec=True)
- @pytest.mark.asyncio
- async def test___init___when_retry_is_executed(self, sleep, uniform):
- _some_function = mock.Mock()
-
- retry_ = retry_async.AsyncRetry(
- predicate=retry_async.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.AsyncMock(
- spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
- )
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
- target.assert_not_called()
-
- result = await wrapped("meep")
-
- assert result == 42
- assert target.call_count == 3
- assert _some_function.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_any_call(retry_._initial)
diff --git a/tests/asyncio/test_bidi_async.py b/tests/asyncio/test_bidi_async.py
deleted file mode 100644
index add685a..0000000
--- a/tests/asyncio/test_bidi_async.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# Copyright 2025, Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import asyncio
-
-from unittest import mock
-
-try:
- from unittest.mock import AsyncMock
-except ImportError: # pragma: NO COVER
- from mock import AsyncMock # type: ignore
-
-
-import pytest
-
-try:
- from grpc import aio
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import bidi_async
-from google.api_core import exceptions
-
-# TODO: remove this when droppping support for "Python 3.10" and below.
-if sys.version_info < (3, 10): # type: ignore[operator]
-
- def aiter(obj):
- return obj.__aiter__()
-
- async def anext(obj):
- return await obj.__anext__()
-
-
-@pytest.mark.asyncio
-class Test_AsyncRequestQueueGenerator:
- async def test_bounded_consume(self):
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = False
-
- q = asyncio.Queue()
- await q.put(mock.sentinel.A)
- await q.put(mock.sentinel.B)
-
- generator = bidi_async._AsyncRequestQueueGenerator(q)
- generator.call = call
-
- items = []
- gen_aiter = aiter(generator)
-
- items.append(await anext(gen_aiter))
- items.append(await anext(gen_aiter))
-
- # At this point, the queue is empty. The next call to anext will sleep.
- # We make the call inactive.
- call.done.return_value = True
-
- with pytest.raises(asyncio.TimeoutError):
- await asyncio.wait_for(anext(gen_aiter), timeout=0.01)
-
- assert items == [mock.sentinel.A, mock.sentinel.B]
-
- async def test_yield_initial_and_exit(self):
- q = asyncio.Queue()
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = True
-
- generator = bidi_async._AsyncRequestQueueGenerator(
- q, initial_request=mock.sentinel.A
- )
- generator.call = call
-
- assert await anext(aiter(generator)) == mock.sentinel.A
-
- async def test_yield_initial_callable_and_exit(self):
- q = asyncio.Queue()
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = True
-
- generator = bidi_async._AsyncRequestQueueGenerator(
- q, initial_request=lambda: mock.sentinel.A
- )
- generator.call = call
-
- assert await anext(aiter(generator)) == mock.sentinel.A
-
- async def test_exit_when_inactive_with_item(self):
- q = asyncio.Queue()
- await q.put(mock.sentinel.A)
-
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = True
-
- generator = bidi_async._AsyncRequestQueueGenerator(q)
- generator.call = call
-
- with pytest.raises(
- StopAsyncIteration,
- ):
- assert await anext(aiter(generator))
-
- # Make sure it put the item back.
- assert not q.empty()
- assert await q.get() == mock.sentinel.A
-
- async def test_exit_when_inactive_empty(self):
- q = asyncio.Queue()
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = True
-
- generator = bidi_async._AsyncRequestQueueGenerator(q)
- generator.call = call
-
- with pytest.raises(asyncio.TimeoutError):
- await asyncio.wait_for(anext(aiter(generator)), timeout=0.01)
-
- async def test_exit_with_stop(self):
- q = asyncio.Queue()
- await q.put(None)
- call = mock.create_autospec(aio.Call, instance=True)
- call.done.return_value = False
-
- generator = bidi_async._AsyncRequestQueueGenerator(q)
- generator.call = call
-
- with pytest.raises(StopAsyncIteration):
- assert await anext(aiter(generator))
-
-
-def make_async_rpc():
- """Makes a mock async RPC used to test Bidi classes."""
- call = mock.create_autospec(aio.StreamStreamCall, instance=True)
- rpc = AsyncMock()
-
- def rpc_side_effect(request, metadata=None):
- call.done.return_value = False
- return call
-
- rpc.side_effect = rpc_side_effect
-
- def cancel_side_effect():
- call.done.return_value = True
- return True
-
- call.cancel.side_effect = cancel_side_effect
- call.read = AsyncMock()
-
- return rpc, call
-
-
-class AsyncClosedCall:
- def __init__(self, exception):
- self.exception = exception
-
- def done(self):
- return True
-
- async def read(self):
- raise self.exception
-
-
-class TestAsyncBidiRpc:
- def test_initial_state(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
- assert bidi_rpc.is_active is False
-
- def test_done_callbacks(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._on_call_done(mock.sentinel.future)
-
- callback.assert_called_once_with(mock.sentinel.future)
-
- @pytest.mark.asyncio
- @pytest.mark.skipif(
- sys.version_info < (3, 8), # type: ignore[operator]
- reason="Versions of Python below 3.8 don't provide support for assert_awaited_once",
- )
- async def test_metadata(self):
- rpc, call = make_async_rpc()
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc, metadata=mock.sentinel.A)
- assert bidi_rpc._rpc_metadata == mock.sentinel.A
-
- await bidi_rpc.open()
- assert bidi_rpc.call == call
- rpc.assert_awaited_once()
- assert rpc.call_args.kwargs["metadata"] == mock.sentinel.A
-
- @pytest.mark.asyncio
- async def test_open(self):
- rpc, call = make_async_rpc()
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc)
-
- await bidi_rpc.open()
-
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active
- call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done)
-
- @pytest.mark.asyncio
- async def test_open_error_already_open(self):
- rpc, _ = make_async_rpc()
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc)
-
- await bidi_rpc.open()
-
- with pytest.raises(ValueError):
- await bidi_rpc.open()
-
- @pytest.mark.asyncio
- async def test_open_error_call_error(self):
- rpc, _ = make_async_rpc()
- expected_exception = exceptions.GoogleAPICallError(
- "test", response=mock.sentinel.response
- )
- rpc.side_effect = expected_exception
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc)
- callback = mock.Mock(spec=["__call__"])
- bidi_rpc.add_done_callback(callback)
-
- with pytest.raises(exceptions.GoogleAPICallError) as exc_info:
- await bidi_rpc.open()
-
- assert exc_info.value == expected_exception
- callback.assert_called_once_with(mock.sentinel.response)
-
- @pytest.mark.asyncio
- async def test_close(self):
- rpc, call = make_async_rpc()
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc)
- await bidi_rpc.open()
-
- await bidi_rpc.close()
-
- call.cancel.assert_called_once()
- assert bidi_rpc.call is call
- assert bidi_rpc.is_active is False
- # ensure the request queue was signaled to stop.
- assert bidi_rpc.pending_requests == 1
- assert await bidi_rpc._request_queue.get() is None
- # ensure request and callbacks are cleaned up
- assert bidi_rpc._initial_request is None
- assert not bidi_rpc._callbacks
-
- @pytest.mark.asyncio
- async def test_close_with_no_rpc(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
-
- await bidi_rpc.close()
-
- assert bidi_rpc.call is None
- assert bidi_rpc.is_active is False
- # ensure the request queue was signaled to stop.
- assert bidi_rpc.pending_requests == 1
- assert await bidi_rpc._request_queue.get() is None
- # ensure request and callbacks are cleaned up
- assert bidi_rpc._initial_request is None
- assert not bidi_rpc._callbacks
-
- @pytest.mark.asyncio
- async def test_close_no_rpc(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
- await bidi_rpc.close()
-
- @pytest.mark.asyncio
- async def test_send(self):
- rpc, call = make_async_rpc()
- bidi_rpc = bidi_async.AsyncBidiRpc(rpc)
- await bidi_rpc.open()
-
- await bidi_rpc.send(mock.sentinel.request)
-
- assert bidi_rpc.pending_requests == 1
- assert await bidi_rpc._request_queue.get() is mock.sentinel.request
-
- @pytest.mark.asyncio
- async def test_send_not_open(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
-
- with pytest.raises(ValueError):
- await bidi_rpc.send(mock.sentinel.request)
-
- @pytest.mark.asyncio
- async def test_send_dead_rpc(self):
- error = ValueError()
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
- bidi_rpc.call = AsyncClosedCall(error)
-
- with pytest.raises(ValueError):
- await bidi_rpc.send(mock.sentinel.request)
-
- @pytest.mark.asyncio
- async def test_recv(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
- bidi_rpc.call = mock.create_autospec(aio.Call, instance=True)
- bidi_rpc.call.read = AsyncMock(return_value=mock.sentinel.response)
-
- response = await bidi_rpc.recv()
-
- assert response == mock.sentinel.response
-
- @pytest.mark.asyncio
- async def test_recv_not_open(self):
- bidi_rpc = bidi_async.AsyncBidiRpc(None)
-
- with pytest.raises(ValueError):
- await bidi_rpc.recv()
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
deleted file mode 100644
index 43700f2..0000000
--- a/tests/asyncio/test_grpc_helpers_async.py
+++ /dev/null
@@ -1,738 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-from ..helpers import warn_deprecated_credentials_file
-import pytest # noqa: I202
-
-try:
- import grpc
- from grpc import aio
-except ImportError: # pragma: NO COVER
- grpc = aio = None
-
-
-if grpc is None: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-
-from google.api_core import exceptions
-from google.api_core import grpc_helpers_async
-import google.auth.credentials
-
-
-class RpcErrorImpl(grpc.RpcError, grpc.Call):
- def __init__(self, code):
- super(RpcErrorImpl, self).__init__()
- self._code = code
-
- def code(self):
- return self._code
-
- def details(self):
- return None
-
- def trailing_metadata(self):
- return None
-
-
-@pytest.mark.asyncio
-async def test_wrap_unary_errors():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
- callable_ = mock.AsyncMock(spec=["__call__"], side_effect=grpc_error)
-
- wrapped_callable = grpc_helpers_async._wrap_unary_errors(callable_)
-
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- await wrapped_callable(1, 2, three="four")
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert exc_info.value.response == grpc_error
-
-
-@pytest.mark.asyncio
-async def test_common_methods_in_wrapped_call():
- mock_call = mock.Mock(aio.UnaryUnaryCall, autospec=True)
- wrapped_call = grpc_helpers_async._WrappedUnaryUnaryCall().with_call(mock_call)
-
- await wrapped_call.initial_metadata()
- assert mock_call.initial_metadata.call_count == 1
-
- await wrapped_call.trailing_metadata()
- assert mock_call.trailing_metadata.call_count == 1
-
- await wrapped_call.code()
- assert mock_call.code.call_count == 1
-
- await wrapped_call.details()
- assert mock_call.details.call_count == 1
-
- wrapped_call.cancelled()
- assert mock_call.cancelled.call_count == 1
-
- wrapped_call.done()
- assert mock_call.done.call_count == 1
-
- wrapped_call.time_remaining()
- assert mock_call.time_remaining.call_count == 1
-
- wrapped_call.cancel()
- assert mock_call.cancel.call_count == 1
-
- callback = mock.sentinel.callback
- wrapped_call.add_done_callback(callback)
- mock_call.add_done_callback.assert_called_once_with(callback)
-
- await wrapped_call.wait_for_connection()
- assert mock_call.wait_for_connection.call_count == 1
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "callable_type,expected_wrapper_type",
- [
- (grpc.aio.UnaryStreamMultiCallable, grpc_helpers_async._WrappedUnaryStreamCall),
- (grpc.aio.StreamUnaryMultiCallable, grpc_helpers_async._WrappedStreamUnaryCall),
- (
- grpc.aio.StreamStreamMultiCallable,
- grpc_helpers_async._WrappedStreamStreamCall,
- ),
- ],
-)
-async def test_wrap_errors_w_stream_type(callable_type, expected_wrapper_type):
- class ConcreteMulticallable(callable_type):
- def __call__(self, *args, **kwargs):
- raise NotImplementedError("Should not be called")
-
- with mock.patch.object(
- grpc_helpers_async, "_wrap_stream_errors"
- ) as wrap_stream_errors:
- callable_ = ConcreteMulticallable()
- grpc_helpers_async.wrap_errors(callable_)
- assert wrap_stream_errors.call_count == 1
- wrap_stream_errors.assert_called_once_with(callable_, expected_wrapper_type)
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_unary_stream():
- mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedUnaryStreamCall
- )
-
- await wrapped_callable(1, 2, three="four")
- multicallable.assert_called_once_with(1, 2, three="four")
- assert mock_call.wait_for_connection.call_count == 1
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_stream_unary():
- mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamUnaryCall
- )
-
- await wrapped_callable(1, 2, three="four")
- multicallable.assert_called_once_with(1, 2, three="four")
- assert mock_call.wait_for_connection.call_count == 1
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_stream_stream():
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
-
- await wrapped_callable(1, 2, three="four")
- multicallable.assert_called_once_with(1, 2, three="four")
- assert mock_call.wait_for_connection.call_count == 1
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_raised():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error])
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
-
- with pytest.raises(exceptions.InvalidArgument):
- await wrapped_callable()
- assert mock_call.wait_for_connection.call_count == 1
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_read():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
-
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mock_call.read = mock.AsyncMock(side_effect=grpc_error)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
-
- wrapped_call = await wrapped_callable(1, 2, three="four")
- multicallable.assert_called_once_with(1, 2, three="four")
- assert mock_call.wait_for_connection.call_count == 1
-
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- await wrapped_call.read()
- assert exc_info.value.response == grpc_error
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_aiter():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
-
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mocked_aiter = mock.Mock(spec=["__anext__"])
- mocked_aiter.__anext__ = mock.AsyncMock(
- side_effect=[mock.sentinel.response, grpc_error]
- )
- mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
- wrapped_call = await wrapped_callable()
-
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- async for response in wrapped_call:
- assert response == mock.sentinel.response
- assert exc_info.value.response == grpc_error
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_aiter_non_rpc_error():
- non_grpc_error = TypeError("Not a gRPC error")
-
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mocked_aiter = mock.Mock(spec=["__anext__"])
- mocked_aiter.__anext__ = mock.AsyncMock(
- side_effect=[mock.sentinel.response, non_grpc_error]
- )
- mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
- wrapped_call = await wrapped_callable()
-
- with pytest.raises(TypeError) as exc_info:
- async for response in wrapped_call:
- assert response == mock.sentinel.response
- assert exc_info.value == non_grpc_error
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_aiter_called_multiple_times():
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
- wrapped_call = await wrapped_callable()
-
- assert wrapped_call.__aiter__() == wrapped_call.__aiter__()
-
-
-@pytest.mark.asyncio
-async def test_wrap_stream_errors_write():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
-
- mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
- mock_call.write = mock.AsyncMock(side_effect=[None, grpc_error])
- mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error])
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(
- multicallable, grpc_helpers_async._WrappedStreamStreamCall
- )
-
- wrapped_call = await wrapped_callable()
-
- await wrapped_call.write(mock.sentinel.request)
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- await wrapped_call.write(mock.sentinel.request)
- assert mock_call.write.call_count == 2
- assert exc_info.value.response == grpc_error
-
- await wrapped_call.done_writing()
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- await wrapped_call.done_writing()
- assert mock_call.done_writing.call_count == 2
- assert exc_info.value.response == grpc_error
-
-
-@mock.patch("google.api_core.grpc_helpers_async._wrap_unary_errors")
-def test_wrap_errors_non_streaming(wrap_unary_errors):
- callable_ = mock.create_autospec(aio.UnaryUnaryMultiCallable)
-
- result = grpc_helpers_async.wrap_errors(callable_)
-
- assert result == wrap_unary_errors.return_value
- wrap_unary_errors.assert_called_once_with(callable_)
-
-
-def test_grpc_async_stream():
- """
- GrpcAsyncStream type should be both an AsyncIterator and a grpc.aio.Call.
- """
- instance = grpc_helpers_async.GrpcAsyncStream[int]()
- assert isinstance(instance, grpc.aio.Call)
- # should implement __aiter__ and __anext__
- assert hasattr(instance, "__aiter__")
- it = instance.__aiter__()
- assert hasattr(it, "__anext__")
-
-
-def test_awaitable_grpc_call():
- """
- AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call.
- """
- instance = grpc_helpers_async.AwaitableGrpcCall()
- assert isinstance(instance, grpc.aio.Call)
- # should implement __await__
- assert hasattr(instance, "__await__")
-
-
-@mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors")
-def test_wrap_errors_streaming(wrap_stream_errors):
- callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable)
-
- result = grpc_helpers_async.wrap_errors(callable_)
-
- assert result == wrap_stream_errors.return_value
- wrap_stream_errors.assert_called_once_with(
- callable_, grpc_helpers_async._WrappedUnaryStreamCall
- )
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path,target,expected_target",
- [
- (None, "example.com:443", "example.com:443"),
- (False, "example.com:443", "example.com:443"),
- (True, "example.com:443", "google-c2p:///example.com"),
- (True, "dns:///example.com", "google-c2p:///example.com"),
- (True, "another-c2p:///example.com", "another-c2p:///example.com"),
- ],
-)
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit(
- grpc_secure_channel,
- google_auth_default,
- composite_creds_call,
- attempt_direct_path,
- target,
- expected_target,
-):
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers_async.create_channel(
- target, attempt_direct_path=attempt_direct_path
- )
-
- assert channel is grpc_secure_channel.return_value
-
- google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
- grpc_secure_channel.assert_called_once_with(
- expected_target, composite_creds, compression=None
- )
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path,target, expected_target",
- [
- (None, "example.com:443", "example.com:443"),
- (False, "example.com:443", "example.com:443"),
- (True, "example.com:443", "google-c2p:///example.com"),
- (True, "dns:///example.com", "google-c2p:///example.com"),
- (True, "another-c2p:///example.com", "another-c2p:///example.com"),
- ],
-)
-@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
-@mock.patch(
- "google.auth.transport.requests.Request",
- autospec=True,
- return_value=mock.sentinel.Request,
-)
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit_with_default_host(
- grpc_secure_channel,
- google_auth_default,
- composite_creds_call,
- request,
- auth_metadata_plugin,
- attempt_direct_path,
- target,
- expected_target,
-):
- default_host = "example.com"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers_async.create_channel(
- target, default_host=default_host, attempt_direct_path=attempt_direct_path
- )
-
- assert channel is grpc_secure_channel.return_value
-
- google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
- auth_metadata_plugin.assert_called_once_with(
- mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
- )
- grpc_secure_channel.assert_called_once_with(
- expected_target, composite_creds, compression=None
- )
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path",
- [
- None,
- False,
- ],
-)
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch(
- "google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call, attempt_direct_path
-):
- target = "example.com:443"
-
- ssl_creds = grpc.ssl_channel_credentials()
-
- grpc_helpers_async.create_channel(
- target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
- )
-
- default.assert_called_once_with(scopes=None, default_scopes=None)
- composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
- composite_creds = composite_creds_call.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
- target = "example.com:443"
- ssl_creds = grpc.ssl_channel_credentials()
- with pytest.raises(
- ValueError, match="Using ssl_credentials with Direct Path is not supported"
- ):
- grpc_helpers_async.create_channel(
- target, ssl_credentials=ssl_creds, attempt_direct_path=True
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit_with_scopes(
- grpc_secure_channel, default, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers_async.create_channel(target, scopes=["one", "two"])
-
- assert channel is grpc_secure_channel.return_value
-
- default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit_with_default_scopes(
- grpc_secure_channel, default, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers_async.create_channel(
- target, default_scopes=["three", "four"], compression=grpc.Compression.Gzip
- )
-
- assert channel is grpc_secure_channel.return_value
-
- default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=grpc.Compression.Gzip
- )
-
-
-def test_create_channel_explicit_with_duplicate_credentials():
- target = "example:443"
-
- with pytest.raises(exceptions.DuplicateCredentialArgs) as excinfo:
- with warn_deprecated_credentials_file():
- grpc_helpers_async.create_channel(
- target,
- credentials_file="credentials.json",
- credentials=mock.sentinel.credentials,
- )
-
- assert "mutually exclusive" in str(excinfo.value)
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers_async.create_channel(
- target, credentials=mock.sentinel.credentials, compression=grpc.Compression.Gzip
- )
-
- auth_creds.assert_called_once_with(
- mock.sentinel.credentials, scopes=None, default_scopes=None
- )
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=grpc.Compression.Gzip
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
- target = "example.com:443"
- scopes = ["1", "2"]
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- channel = grpc_helpers_async.create_channel(
- target,
- credentials=credentials,
- scopes=scopes,
- compression=grpc.Compression.Gzip,
- )
-
- credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=grpc.Compression.Gzip
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_explicit_default_scopes(
- grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- default_scopes = ["3", "4"]
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- channel = grpc_helpers_async.create_channel(
- target,
- credentials=credentials,
- default_scopes=default_scopes,
- compression=grpc.Compression.Gzip,
- )
-
- credentials.with_scopes.assert_called_once_with(
- scopes=None, default_scopes=default_scopes
- )
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=grpc.Compression.Gzip
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_explicit_with_quota_project(
- grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(
- google.auth.credentials.CredentialsWithQuotaProject, instance=True
- )
-
- channel = grpc_helpers_async.create_channel(
- target, credentials=credentials, quota_project_id="project-foo"
- )
-
- credentials.with_quota_project.assert_called_once_with("project-foo")
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers_async.create_channel(
- target, credentials_file=credentials_file
- )
-
- google.auth.load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=None, default_scopes=None
- )
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file_and_scopes(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- scopes = ["1", "2"]
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers_async.create_channel(
- target, credentials_file=credentials_file, scopes=scopes
- )
-
- google.auth.load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=scopes, default_scopes=None
- )
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.aio.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file_and_default_scopes(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- default_scopes = ["3", "4"]
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers_async.create_channel(
- target, credentials_file=credentials_file, default_scopes=default_scopes
- )
-
- google.auth.load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=None, default_scopes=default_scopes
- )
- assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.aio.secure_channel")
-def test_create_channel(grpc_secure_channel):
- target = "example.com:443"
- scopes = ["test_scope"]
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- grpc_helpers_async.create_channel(target, credentials=credentials, scopes=scopes)
- grpc_secure_channel.assert_called()
- credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
-
-
-@pytest.mark.asyncio
-async def test_fake_stream_unary_call():
- fake_call = grpc_helpers_async.FakeStreamUnaryCall()
- await fake_call.wait_for_connection()
- response = await fake_call
- assert fake_call.response == response
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
deleted file mode 100644
index 22a4bd6..0000000
--- a/tests/asyncio/test_operation_async.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import pytest
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-try:
- import grpc # noqa: F401
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core import operation_async
-from google.api_core import operations_v1
-from google.api_core import retry_async
-from google.longrunning import operations_pb2
-from google.protobuf import struct_pb2
-from google.rpc import code_pb2
-from google.rpc import status_pb2
-
-TEST_OPERATION_NAME = "test/operation"
-
-
-def make_operation_proto(
- name=TEST_OPERATION_NAME, metadata=None, response=None, error=None, **kwargs
-):
- operation_proto = operations_pb2.Operation(name=name, **kwargs)
-
- if metadata is not None:
- operation_proto.metadata.Pack(metadata)
-
- if response is not None:
- operation_proto.response.Pack(response)
-
- if error is not None:
- operation_proto.error.CopyFrom(error)
-
- return operation_proto
-
-
-def make_operation_future(client_operations_responses=None):
- if client_operations_responses is None:
- client_operations_responses = [make_operation_proto()]
-
- refresh = mock.AsyncMock(spec=["__call__"], side_effect=client_operations_responses)
- refresh.responses = client_operations_responses
- cancel = mock.AsyncMock(spec=["__call__"])
- operation_future = operation_async.AsyncOperation(
- client_operations_responses[0],
- refresh,
- cancel,
- result_type=struct_pb2.Struct,
- metadata_type=struct_pb2.Struct,
- )
-
- return operation_future, refresh, cancel
-
-
-@pytest.mark.asyncio
-async def test_constructor():
- future, refresh, _ = make_operation_future()
-
- assert future.operation == refresh.responses[0]
- assert future.operation.done is False
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.metadata is None
- assert await future.running()
-
-
-@pytest.mark.asyncio
-async def test_metadata():
- expected_metadata = struct_pb2.Struct()
- future, _, _ = make_operation_future(
- [make_operation_proto(metadata=expected_metadata)]
- )
-
- assert future.metadata == expected_metadata
-
-
-@pytest.mark.asyncio
-async def test_cancellation():
- responses = [
- make_operation_proto(),
- # Second response indicates that the operation was cancelled.
- make_operation_proto(
- done=True, error=status_pb2.Status(code=code_pb2.CANCELLED)
- ),
- ]
- future, _, cancel = make_operation_future(responses)
-
- assert await future.cancel()
- assert await future.cancelled()
- cancel.assert_called_once_with()
-
- # Cancelling twice should have no effect.
- assert not await future.cancel()
- cancel.assert_called_once_with()
-
-
-@pytest.mark.asyncio
-async def test_result():
- expected_result = struct_pb2.Struct()
- responses = [
- make_operation_proto(),
- # Second operation response includes the result.
- make_operation_proto(done=True, response=expected_result),
- ]
- future, _, _ = make_operation_future(responses)
-
- result = await future.result()
-
- assert result == expected_result
- assert await future.done()
-
-
-@pytest.mark.asyncio
-async def test_done_w_retry():
- RETRY_PREDICATE = retry_async.if_exception_type(exceptions.TooManyRequests)
- test_retry = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
-
- expected_result = struct_pb2.Struct()
- responses = [
- make_operation_proto(),
- # Second operation response includes the result.
- make_operation_proto(done=True, response=expected_result),
- ]
- future, refresh, _ = make_operation_future(responses)
-
- await future.done(retry=test_retry)
- refresh.assert_called_once_with(retry=test_retry)
-
-
-@pytest.mark.asyncio
-async def test_exception():
- expected_exception = status_pb2.Status(message="meep")
- responses = [
- make_operation_proto(),
- # Second operation response includes the error.
- make_operation_proto(done=True, error=expected_exception),
- ]
- future, _, _ = make_operation_future(responses)
-
- exception = await future.exception()
-
- assert expected_exception.message in "{!r}".format(exception)
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@pytest.mark.asyncio
-async def test_unexpected_result(unused_sleep):
- responses = [
- make_operation_proto(),
- # Second operation response is done, but has not error or response.
- make_operation_proto(done=True),
- ]
- future, _, _ = make_operation_future(responses)
-
- exception = await future.exception()
-
- assert "Unexpected state" in "{!r}".format(exception)
-
-
-@pytest.mark.asyncio
-async def test_from_gapic():
- operation_proto = make_operation_proto(done=True)
- operations_client = mock.create_autospec(
- operations_v1.OperationsAsyncClient, instance=True
- )
-
- future = operation_async.from_gapic(
- operation_proto,
- operations_client,
- struct_pb2.Struct,
- metadata_type=struct_pb2.Struct,
- grpc_metadata=[("x-goog-request-params", "foo")],
- )
-
- assert future._result_type == struct_pb2.Struct
- assert future._metadata_type == struct_pb2.Struct
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.done
- assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
- assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
-
-
-def test_deserialize():
- op = make_operation_proto(name="foobarbaz")
- serialized = op.SerializeToString()
- deserialized_op = operation_async.AsyncOperation.deserialize(serialized)
- assert op.name == deserialized_op.name
- assert type(op) is type(deserialized_op)
diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py
deleted file mode 100644
index 63e26d0..0000000
--- a/tests/asyncio/test_page_iterator_async.py
+++ /dev/null
@@ -1,296 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-import pytest
-
-from google.api_core import page_iterator_async
-
-
-class PageAsyncIteratorImpl(page_iterator_async.AsyncIterator):
- async def _next_page(self):
- return mock.create_autospec(page_iterator_async.Page, instance=True)
-
-
-class TestAsyncIterator:
- def test_constructor(self):
- client = mock.sentinel.client
- item_to_value = mock.sentinel.item_to_value
- token = "ab13nceor03"
- max_results = 1337
-
- iterator = PageAsyncIteratorImpl(
- client, item_to_value, page_token=token, max_results=max_results
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.item_to_value == item_to_value
- assert iterator.max_results == max_results
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token == token
- assert iterator.num_results == 0
-
- @pytest.mark.asyncio
- async def test_anext(self):
- parent = mock.sentinel.parent
- page_1 = page_iterator_async.Page(
- parent,
- ("item 1.1", "item 1.2"),
- page_iterator_async._item_to_value_identity,
- )
- page_2 = page_iterator_async.Page(
- parent, ("item 2.1",), page_iterator_async._item_to_value_identity
- )
-
- async_iterator = PageAsyncIteratorImpl(None, None)
- async_iterator._next_page = mock.AsyncMock(side_effect=[page_1, page_2, None])
-
- # Consume items and check the state of the async_iterator.
- assert async_iterator.num_results == 0
- assert await async_iterator.__anext__() == "item 1.1"
- assert async_iterator.num_results == 1
-
- assert await async_iterator.__anext__() == "item 1.2"
- assert async_iterator.num_results == 2
-
- assert await async_iterator.__anext__() == "item 2.1"
- assert async_iterator.num_results == 3
-
- with pytest.raises(StopAsyncIteration):
- await async_iterator.__anext__()
-
- def test_pages_property_starts(self):
- iterator = PageAsyncIteratorImpl(None, None)
-
- assert not iterator._started
-
- assert inspect.isasyncgen(iterator.pages)
-
- assert iterator._started
-
- def test_pages_property_restart(self):
- iterator = PageAsyncIteratorImpl(None, None)
-
- assert iterator.pages
-
- # Make sure we cannot restart.
- with pytest.raises(ValueError):
- assert iterator.pages
-
- @pytest.mark.asyncio
- async def test__page_aiter_increment(self):
- iterator = PageAsyncIteratorImpl(None, None)
- page = page_iterator_async.Page(
- iterator, ("item",), page_iterator_async._item_to_value_identity
- )
- iterator._next_page = mock.AsyncMock(side_effect=[page, None])
-
- assert iterator.num_results == 0
-
- page_aiter = iterator._page_aiter(increment=True)
- await page_aiter.__anext__()
-
- assert iterator.num_results == 1
- await page_aiter.aclose()
-
- @pytest.mark.asyncio
- async def test__page_aiter_no_increment(self):
- iterator = PageAsyncIteratorImpl(None, None)
-
- assert iterator.num_results == 0
-
- page_aiter = iterator._page_aiter(increment=False)
- await page_aiter.__anext__()
-
- # results should still be 0 after fetching a page.
- assert iterator.num_results == 0
- await page_aiter.aclose()
-
- @pytest.mark.asyncio
- async def test__items_aiter(self):
- # Items to be returned.
- item1 = 17
- item2 = 100
- item3 = 211
-
- # Make pages from mock responses
- parent = mock.sentinel.parent
- page1 = page_iterator_async.Page(
- parent, (item1, item2), page_iterator_async._item_to_value_identity
- )
- page2 = page_iterator_async.Page(
- parent, (item3,), page_iterator_async._item_to_value_identity
- )
-
- iterator = PageAsyncIteratorImpl(None, None)
- iterator._next_page = mock.AsyncMock(side_effect=[page1, page2, None])
-
- items_aiter = iterator._items_aiter()
-
- assert inspect.isasyncgen(items_aiter)
-
- # Consume items and check the state of the iterator.
- assert iterator.num_results == 0
- assert await items_aiter.__anext__() == item1
- assert iterator.num_results == 1
-
- assert await items_aiter.__anext__() == item2
- assert iterator.num_results == 2
-
- assert await items_aiter.__anext__() == item3
- assert iterator.num_results == 3
-
- with pytest.raises(StopAsyncIteration):
- await items_aiter.__anext__()
-
- @pytest.mark.asyncio
- async def test___aiter__(self):
- async_iterator = PageAsyncIteratorImpl(None, None)
- async_iterator._next_page = mock.AsyncMock(side_effect=[(1, 2), (3,), None])
-
- assert not async_iterator._started
-
- result = []
- async for item in async_iterator:
- result.append(item)
-
- assert result == [1, 2, 3]
- assert async_iterator._started
-
- def test___aiter__restart(self):
- iterator = PageAsyncIteratorImpl(None, None)
-
- iterator.__aiter__()
-
- # Make sure we cannot restart.
- with pytest.raises(ValueError):
- iterator.__aiter__()
-
- def test___aiter___restart_after_page(self):
- iterator = PageAsyncIteratorImpl(None, None)
-
- assert iterator.pages
-
- # Make sure we cannot restart after starting the page iterator
- with pytest.raises(ValueError):
- iterator.__aiter__()
-
-
-class TestAsyncGRPCIterator(object):
- def test_constructor(self):
- client = mock.sentinel.client
- items_field = "items"
- iterator = page_iterator_async.AsyncGRPCIterator(
- client, mock.sentinel.method, mock.sentinel.request, items_field
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.max_results is None
- assert iterator.item_to_value is page_iterator_async._item_to_value_identity
- assert iterator._method == mock.sentinel.method
- assert iterator._request == mock.sentinel.request
- assert iterator._items_field == items_field
- assert (
- iterator._request_token_field
- == page_iterator_async.AsyncGRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD
- )
- assert (
- iterator._response_token_field
- == page_iterator_async.AsyncGRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD
- )
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token is None
- assert iterator.num_results == 0
-
- def test_constructor_options(self):
- client = mock.sentinel.client
- items_field = "items"
- request_field = "request"
- response_field = "response"
- iterator = page_iterator_async.AsyncGRPCIterator(
- client,
- mock.sentinel.method,
- mock.sentinel.request,
- items_field,
- item_to_value=mock.sentinel.item_to_value,
- request_token_field=request_field,
- response_token_field=response_field,
- max_results=42,
- )
-
- assert iterator.client is client
- assert iterator.max_results == 42
- assert iterator.item_to_value is mock.sentinel.item_to_value
- assert iterator._method == mock.sentinel.method
- assert iterator._request == mock.sentinel.request
- assert iterator._items_field == items_field
- assert iterator._request_token_field == request_field
- assert iterator._response_token_field == response_field
-
- @pytest.mark.asyncio
- async def test_iterate(self):
- request = mock.Mock(spec=["page_token"], page_token=None)
- response1 = mock.Mock(items=["a", "b"], next_page_token="1")
- response2 = mock.Mock(items=["c"], next_page_token="2")
- response3 = mock.Mock(items=["d"], next_page_token="")
- method = mock.AsyncMock(side_effect=[response1, response2, response3])
- iterator = page_iterator_async.AsyncGRPCIterator(
- mock.sentinel.client, method, request, "items"
- )
-
- assert iterator.num_results == 0
-
- items = []
- async for item in iterator:
- items.append(item)
-
- assert items == ["a", "b", "c", "d"]
-
- method.assert_called_with(request)
- assert method.call_count == 3
- assert request.page_token == "2"
-
- @pytest.mark.asyncio
- async def test_iterate_with_max_results(self):
- request = mock.Mock(spec=["page_token"], page_token=None)
- response1 = mock.Mock(items=["a", "b"], next_page_token="1")
- response2 = mock.Mock(items=["c"], next_page_token="2")
- response3 = mock.Mock(items=["d"], next_page_token="")
- method = mock.AsyncMock(side_effect=[response1, response2, response3])
- iterator = page_iterator_async.AsyncGRPCIterator(
- mock.sentinel.client, method, request, "items", max_results=3
- )
-
- assert iterator.num_results == 0
-
- items = []
- async for item in iterator:
- items.append(item)
-
- assert items == ["a", "b", "c"]
- assert iterator.num_results == 3
-
- method.assert_called_with(request)
- assert method.call_count == 2
- assert request.page_token == "1"
diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py
deleted file mode 100644
index 13549c7..0000000
--- a/tests/asyncio/test_rest_streaming_async.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO: set random.seed explicitly in each test function.
-# See related issue: https://github.com/googleapis/python-api-core/issues/689.
-
-import datetime
-import logging
-import random
-import time
-from typing import List, AsyncIterator
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-import pytest # noqa: I202
-
-import proto
-
-try:
- from google.auth.aio.transport import Response
-except ImportError:
- pytest.skip(
- "google-api-core[async_rest] is required to test asynchronous rest streaming.",
- allow_module_level=True,
- )
-
-from google.api_core import rest_streaming_async
-from google.api import http_pb2
-from google.api import httpbody_pb2
-
-
-from ..helpers import Composer, Song, EchoResponse, parse_responses
-
-
-__protobuf__ = proto.module(package=__name__)
-SEED = int(time.time())
-logging.info(f"Starting async rest streaming tests with random seed: {SEED}")
-random.seed(SEED)
-
-
-async def mock_async_gen(data, chunk_size=1):
- for i in range(0, len(data)): # pragma: NO COVER
- chunk = data[i : i + chunk_size]
- yield chunk.encode("utf-8")
-
-
-class ResponseMock(Response):
- class _ResponseItr(AsyncIterator[bytes]):
- def __init__(self, _response_bytes: bytes, random_split=False):
- self._responses_bytes = _response_bytes
- self._idx = 0
- self._random_split = random_split
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- if self._idx >= len(self._responses_bytes):
- raise StopAsyncIteration
- if self._random_split:
- n = random.randint(1, len(self._responses_bytes[self._idx :]))
- else:
- n = 1
- x = self._responses_bytes[self._idx : self._idx + n]
- self._idx += n
- return x
-
- def __init__(
- self,
- responses: List[proto.Message],
- response_cls,
- random_split=False,
- ):
- self._responses = responses
- self._random_split = random_split
- self._response_message_cls = response_cls
-
- def _parse_responses(self):
- return parse_responses(self._response_message_cls, self._responses)
-
- @property
- async def headers(self):
- raise NotImplementedError()
-
- @property
- async def status_code(self):
- raise NotImplementedError()
-
- async def close(self):
- raise NotImplementedError()
-
- async def content(self, chunk_size=None):
- itr = self._ResponseItr(
- self._parse_responses(), random_split=self._random_split
- )
- async for chunk in itr:
- yield chunk
-
- async def read(self):
- raise NotImplementedError()
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [(False, True), (False, False)],
-)
-async def test_next_simple(random_split, resp_message_is_proto_plus):
- if resp_message_is_proto_plus:
- response_type = EchoResponse
- responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
- else:
- response_type = httpbody_pb2.HttpBody
- responses = [
- httpbody_pb2.HttpBody(content_type="hello world"),
- httpbody_pb2.HttpBody(content_type="yes"),
- ]
-
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- idx = 0
- async for response in itr:
- assert response == responses[idx]
- idx += 1
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-async def test_next_nested(random_split, resp_message_is_proto_plus):
- if resp_message_is_proto_plus:
- response_type = Song
- responses = [
- Song(title="some song", composer=Composer(given_name="some name")),
- Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
- ]
- else:
- # Although `http_pb2.HttpRule`` is used in the response, any response message
- # can be used which meets this criteria for the test of having a nested field.
- response_type = http_pb2.HttpRule
- responses = [
- http_pb2.HttpRule(
- selector="some selector",
- custom=http_pb2.CustomHttpPattern(kind="some kind"),
- ),
- http_pb2.HttpRule(
- selector="another selector",
- custom=http_pb2.CustomHttpPattern(path="some path"),
- ),
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- idx = 0
- async for response in itr:
- assert response == responses[idx]
- idx += 1
- assert idx == len(responses)
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-async def test_next_stress(random_split, resp_message_is_proto_plus):
- n = 50
- if resp_message_is_proto_plus:
- response_type = Song
- responses = [
- Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
- for i in range(n)
- ]
- else:
- response_type = http_pb2.HttpRule
- responses = [
- http_pb2.HttpRule(
- selector="selector_%d" % i,
- custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
- )
- for i in range(n)
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- idx = 0
- async for response in itr:
- assert response == responses[idx]
- idx += 1
- assert idx == n
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-async def test_next_escaped_characters_in_string(
- random_split, resp_message_is_proto_plus
-):
- if resp_message_is_proto_plus:
- response_type = Song
- composer_with_relateds = Composer()
- relateds = ["Artist A", "Artist B"]
- composer_with_relateds.relateds = relateds
-
- responses = [
- Song(
- title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
- ),
- Song(
- title='{"this is weird": "totally"}',
- composer=Composer(given_name="\\{}\\"),
- ),
- Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
- ]
- else:
- response_type = http_pb2.Http
- responses = [
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='ti"tle\nfoo\tbar{}',
- custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
- )
- ]
- ),
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='{"this is weird": "totally"}',
- custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
- )
- ]
- ),
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='\\{"key": ["value",]}\\',
- custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
- )
- ]
- ),
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- idx = 0
- async for response in itr:
- assert response == responses[idx]
- idx += 1
- assert idx == len(responses)
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-async def test_next_not_array(response_type):
- data = '{"hello": 0}'
- with mock.patch.object(
- ResponseMock, "content", return_value=mock_async_gen(data)
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- await itr.__anext__()
- mock_method.assert_called_once()
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-async def test_cancel(response_type):
- with mock.patch.object(
- ResponseMock, "close", new_callable=mock.AsyncMock
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- await itr.cancel()
- mock_method.assert_called_once()
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-async def test_iterator_as_context_manager(response_type):
- with mock.patch.object(
- ResponseMock, "close", new_callable=mock.AsyncMock
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- async with rest_streaming_async.AsyncResponseIterator(resp, response_type):
- pass
- mock_method.assert_called_once()
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
- "response_type,return_value",
- [
- (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
- (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
- ],
-)
-async def test_check_buffer(response_type, return_value):
- with mock.patch.object(
- ResponseMock,
- "_parse_responses",
- return_value=return_value,
- ):
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- await itr.__anext__()
- await itr.__anext__()
-
-
-@pytest.mark.asyncio
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-async def test_next_html(response_type):
- data = "<!DOCTYPE html><html></html>"
- with mock.patch.object(
- ResponseMock, "content", return_value=mock_async_gen(data)
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
-
- itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- await itr.__anext__()
- mock_method.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_invalid_response_class():
- class SomeClass:
- pass
-
- resp = ResponseMock(responses=[], response_cls=SomeClass)
- with pytest.raises(
- ValueError,
- match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
- ):
- rest_streaming_async.AsyncResponseIterator(resp, SomeClass)
diff --git a/tests/helpers.py b/tests/helpers.py
deleted file mode 100644
index 4c7d5db..0000000
--- a/tests/helpers.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for tests"""
-
-import functools
-import logging
-import pytest # noqa: I202
-from typing import List
-
-import proto
-
-from google.protobuf import duration_pb2
-from google.protobuf import timestamp_pb2
-from google.protobuf.json_format import MessageToJson
-
-
-class Genre(proto.Enum):
- GENRE_UNSPECIFIED = 0
- CLASSICAL = 1
- JAZZ = 2
- ROCK = 3
-
-
-class Composer(proto.Message):
- given_name = proto.Field(proto.STRING, number=1)
- family_name = proto.Field(proto.STRING, number=2)
- relateds = proto.RepeatedField(proto.STRING, number=3)
- indices = proto.MapField(proto.STRING, proto.STRING, number=4)
-
-
-class Song(proto.Message):
- composer = proto.Field(Composer, number=1)
- title = proto.Field(proto.STRING, number=2)
- lyrics = proto.Field(proto.STRING, number=3)
- year = proto.Field(proto.INT32, number=4)
- genre = proto.Field(Genre, number=5)
- is_five_mins_longer = proto.Field(proto.BOOL, number=6)
- score = proto.Field(proto.DOUBLE, number=7)
- likes = proto.Field(proto.INT64, number=8)
- duration = proto.Field(duration_pb2.Duration, number=9)
- date_added = proto.Field(timestamp_pb2.Timestamp, number=10)
-
-
-class EchoResponse(proto.Message):
- content = proto.Field(proto.STRING, number=1)
-
-
-def parse_responses(response_message_cls, all_responses: List[proto.Message]) -> bytes:
- # json.dumps returns a string surrounded with quotes that need to be stripped
- # in order to be an actual JSON.
- json_responses = [
- (
- response_message_cls.to_json(response).strip('"')
- if issubclass(response_message_cls, proto.Message)
- else MessageToJson(response).strip('"')
- )
- for response in all_responses
- ]
- logging.info(f"Sending JSON stream: {json_responses}")
- ret_val = "[{}]".format(",".join(json_responses))
- return bytes(ret_val, "utf-8")
-
-
-warn_deprecated_credentials_file = functools.partial(
- # This is used to test that the auth credentials file deprecation
- # warning is emitted as expected.
- pytest.warns,
- DeprecationWarning,
- match="argument is deprecated because of a potential security risk",
-)
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/unit/__init__.py
+++ /dev/null
diff --git a/tests/unit/future/__init__.py b/tests/unit/future/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/unit/future/__init__.py
+++ /dev/null
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
deleted file mode 100644
index a37efdd..0000000
--- a/tests/unit/future/test__helpers.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from unittest import mock
-
-from google.api_core.future import _helpers
-
-
-@mock.patch("threading.Thread", autospec=True)
-def test_start_deamon_thread(unused_thread):
- deamon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target)
- assert deamon_thread.daemon is True
-
-
-def test_safe_invoke_callback():
- callback = mock.Mock(spec=["__call__"], return_value=42)
- result = _helpers.safe_invoke_callback(callback, "a", b="c")
- assert result == 42
- callback.assert_called_once_with("a", b="c")
-
-
-def test_safe_invoke_callback_exception():
- callback = mock.Mock(spec=["__call__"], side_effect=ValueError())
- result = _helpers.safe_invoke_callback(callback, "a", b="c")
- assert result is None
- callback.assert_called_once_with("a", b="c")
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
deleted file mode 100644
index 2f66f23..0000000
--- a/tests/unit/future/test_polling.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import concurrent.futures
-import threading
-import time
-from unittest import mock
-
-import pytest
-
-from google.api_core import exceptions, retry
-from google.api_core.future import polling
-
-
-class PollingFutureImpl(polling.PollingFuture):
- def done(self, retry=None):
- return False
-
- def cancel(self):
- return True
-
- def cancelled(self):
- return False
-
-
-def test_polling_future_constructor():
- future = PollingFutureImpl()
- assert not future.done()
- assert not future.cancelled()
- assert future.running()
- assert future.cancel()
- with mock.patch.object(future, "done", return_value=True):
- future.result()
-
-
-def test_set_result():
- future = PollingFutureImpl()
- callback = mock.Mock()
-
- future.set_result(1)
-
- assert future.result() == 1
- future.add_done_callback(callback)
- callback.assert_called_once_with(future)
-
-
-def test_set_exception():
- future = PollingFutureImpl()
- exception = ValueError("meep")
-
- future.set_exception(exception)
-
- assert future.exception() == exception
- with pytest.raises(ValueError):
- future.result()
-
- callback = mock.Mock()
- future.add_done_callback(callback)
- callback.assert_called_once_with(future)
-
-
-def test_invoke_callback_exception():
- future = PollingFutureImplWithPoll()
- future.set_result(42)
-
- # This should not raise, despite the callback causing an exception.
- callback = mock.Mock(side_effect=ValueError)
- future.add_done_callback(callback)
- callback.assert_called_once_with(future)
-
-
-class PollingFutureImplWithPoll(PollingFutureImpl):
- def __init__(self, max_poll_count=1):
- super(PollingFutureImplWithPoll, self).__init__()
- self.poll_count = 0
- self.event = threading.Event()
- self.max_poll_count = max_poll_count
-
- def done(self, retry=None):
- self.poll_count += 1
- if self.max_poll_count > self.poll_count:
- return False
- self.event.wait()
- self.set_result(42)
- return True
-
-
-def test_result_with_one_polling():
- future = PollingFutureImplWithPoll(max_poll_count=1)
-
- future.event.set()
- result = future.result()
-
- assert result == 42
- assert future.poll_count == 1
- # Repeated calls should not cause additional polling
- assert future.result() == result
- assert future.poll_count == 1
-
-
-def test_result_with_two_pollings():
- future = PollingFutureImplWithPoll(max_poll_count=2)
-
- future.event.set()
- result = future.result()
-
- assert result == 42
- assert future.poll_count == 2
- # Repeated calls should not cause additional polling
- assert future.result() == result
- assert future.poll_count == 2
-
-
-def test_result_with_two_pollings_custom_retry():
- future = PollingFutureImplWithPoll(max_poll_count=2)
-
- future.event.set()
- result = future.result()
-
- assert result == 42
- assert future.poll_count == 2
- # Repeated calls should not cause additional polling
- assert future.result() == result
- assert future.poll_count == 2
-
-
-class PollingFutureImplTimeout(PollingFutureImplWithPoll):
- def done(self, retry=None):
- time.sleep(1)
- return False
-
-
-def test_result_timeout():
- future = PollingFutureImplTimeout()
- with pytest.raises(concurrent.futures.TimeoutError):
- future.result(timeout=1)
-
-
-def test_exception_timeout():
- future = PollingFutureImplTimeout()
- with pytest.raises(concurrent.futures.TimeoutError):
- future.exception(timeout=1)
-
-
-class PollingFutureImplTransient(PollingFutureImplWithPoll):
- def __init__(self, errors):
- super(PollingFutureImplTransient, self).__init__()
- self._errors = errors
-
- def done(self, retry=None):
- self.poll_count += 1
- if self._errors:
- error, self._errors = self._errors[0], self._errors[1:]
- raise error("testing")
- self.set_result(42)
- return True
-
-
-def test_result_transient_error():
- future = PollingFutureImplTransient(
- (
- polling._OperationNotComplete,
- polling._OperationNotComplete,
- polling._OperationNotComplete,
- )
- )
- result = future.result()
- assert result == 42
- assert future.poll_count == 4
- # Repeated calls should not cause additional polling
- assert future.result() == result
- assert future.poll_count == 4
-
-
-def test_callback_background_thread():
- future = PollingFutureImplWithPoll()
- callback = mock.Mock()
-
- future.add_done_callback(callback)
-
- assert future._polling_thread is not None
-
- # Give the thread a second to poll
- time.sleep(1)
- assert future.poll_count == 1
-
- future.event.set()
- future._polling_thread.join()
-
- callback.assert_called_once_with(future)
-
-
-def test_double_callback_background_thread():
- future = PollingFutureImplWithPoll()
- callback = mock.Mock()
- callback2 = mock.Mock()
-
- future.add_done_callback(callback)
- current_thread = future._polling_thread
- assert current_thread is not None
-
- # only one polling thread should be created.
- future.add_done_callback(callback2)
- assert future._polling_thread is current_thread
-
- future.event.set()
- future._polling_thread.join()
-
- assert future.poll_count == 1
- callback.assert_called_once_with(future)
- callback2.assert_called_once_with(future)
-
-
-class PollingFutureImplWithoutRetry(PollingFutureImpl):
- def done(self, retry=None):
- return True
-
- def result(self, timeout=None, retry=None, polling=None):
- return super(PollingFutureImplWithoutRetry, self).result()
-
- def _blocking_poll(self, timeout=None, retry=None, polling=None):
- return super(PollingFutureImplWithoutRetry, self)._blocking_poll(
- timeout=timeout
- )
-
-
-class PollingFutureImplWith_done_or_raise(PollingFutureImpl):
- def done(self, retry=None):
- return True
-
- def _done_or_raise(self, retry=None):
- return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise()
-
-
-def test_polling_future_without_retry():
- custom_retry = retry.Retry(
- predicate=retry.if_exception_type(exceptions.TooManyRequests)
- )
- future = PollingFutureImplWithoutRetry()
- assert future.done()
- assert not future.running()
- assert future.result() is None
-
- with mock.patch.object(future, "done") as done_mock:
- future._done_or_raise()
- done_mock.assert_called_once_with(retry=None)
-
- with mock.patch.object(future, "done") as done_mock:
- future._done_or_raise(retry=custom_retry)
- done_mock.assert_called_once_with(retry=custom_retry)
-
-
-def test_polling_future_with__done_or_raise():
- future = PollingFutureImplWith_done_or_raise()
- assert future.done()
- assert not future.running()
- assert future.result() is None
diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py
deleted file mode 100644
index 2ca5c40..0000000
--- a/tests/unit/gapic/test_client_info.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-
-from google.api_core.gapic_v1 import client_info
-
-
-def test_to_grpc_metadata():
- info = client_info.ClientInfo()
-
- metadata = info.to_grpc_metadata()
-
- assert metadata == (client_info.METRICS_METADATA_KEY, info.to_user_agent())
diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py
deleted file mode 100644
index 5e42fde..0000000
--- a/tests/unit/gapic/test_config.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core.gapic_v1 import config
-
-
-INTERFACE_CONFIG = {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "other": ["FAILED_PRECONDITION"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 2.5,
- "max_retry_delay_millis": 120000,
- "initial_rpc_timeout_millis": 120000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 120000,
- "total_timeout_millis": 600000,
- },
- "other": {
- "initial_retry_delay_millis": 1000,
- "retry_delay_multiplier": 1,
- "max_retry_delay_millis": 1000,
- "initial_rpc_timeout_millis": 1000,
- "rpc_timeout_multiplier": 1,
- "max_rpc_timeout_millis": 1000,
- "total_timeout_millis": 1000,
- },
- },
- "methods": {
- "AnnotateVideo": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "Other": {
- "timeout_millis": 60000,
- "retry_codes_name": "other",
- "retry_params_name": "other",
- },
- "Plain": {"timeout_millis": 30000},
- },
-}
-
-
-def test_create_method_configs():
- method_configs = config.parse_method_configs(INTERFACE_CONFIG)
-
- retry, timeout = method_configs["AnnotateVideo"]
- assert retry._predicate(exceptions.DeadlineExceeded(None))
- assert retry._predicate(exceptions.ServiceUnavailable(None))
- assert retry._initial == 1.0
- assert retry._multiplier == 2.5
- assert retry._maximum == 120.0
- assert retry._deadline == 600.0
- assert timeout._initial == 120.0
- assert timeout._multiplier == 1.0
- assert timeout._maximum == 120.0
-
- retry, timeout = method_configs["Other"]
- assert retry._predicate(exceptions.FailedPrecondition(None))
- assert retry._initial == 1.0
- assert retry._multiplier == 1.0
- assert retry._maximum == 1.0
- assert retry._deadline == 1.0
- assert timeout._initial == 1.0
- assert timeout._multiplier == 1.0
- assert timeout._maximum == 1.0
-
- retry, timeout = method_configs["Plain"]
- assert retry is None
- assert timeout._timeout == 30.0
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
deleted file mode 100644
index 29e8fc2..0000000
--- a/tests/unit/gapic/test_method.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-from unittest import mock
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.api_core import timeout
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.method
-import google.api_core.page_iterator
-
-
-def _utcnow_monotonic():
- curr_value = datetime.datetime.min
- delta = datetime.timedelta(seconds=0.5)
- while True:
- yield curr_value
- curr_value += delta
-
-
-def test_wrap_method_basic():
- method = mock.Mock(spec=["__call__"], return_value=42)
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
-
- result = wrapped_method(1, 2, meep="moop")
-
- assert result == 42
- method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
-
- # Check that the default client info was specified in the metadata.
- metadata = method.call_args[1]["metadata"]
- assert len(metadata) == 1
- client_info = google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO
- user_agent_metadata = client_info.to_grpc_metadata()
- assert user_agent_metadata in metadata
-
-
-def test_wrap_method_with_no_client_info():
- method = mock.Mock(spec=["__call__"])
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, client_info=None
- )
-
- wrapped_method(1, 2, meep="moop")
-
- method.assert_called_once_with(1, 2, meep="moop")
-
-
-def test_wrap_method_with_custom_client_info():
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- python_version=1,
- grpc_version=2,
- api_core_version=3,
- gapic_version=4,
- client_library_version=5,
- protobuf_runtime_version=6,
- )
- method = mock.Mock(spec=["__call__"])
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, client_info=client_info
- )
-
- wrapped_method(1, 2, meep="moop")
-
- method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
-
- # Check that the custom client info was specified in the metadata.
- metadata = method.call_args[1]["metadata"]
- assert client_info.to_grpc_metadata() in metadata
-
-
-def test_invoke_wrapped_method_with_metadata():
- method = mock.Mock(spec=["__call__"])
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
-
- wrapped_method(mock.sentinel.request, metadata=[("a", "b")])
-
- method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
- metadata = method.call_args[1]["metadata"]
- # Metadata should have two items: the client info metadata and our custom
- # metadata.
- assert len(metadata) == 2
- assert ("a", "b") in metadata
-
-
-def test_invoke_wrapped_method_with_metadata_as_none():
- method = mock.Mock(spec=["__call__"])
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
-
- wrapped_method(mock.sentinel.request, metadata=None)
-
- method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
- metadata = method.call_args[1]["metadata"]
- # Metadata should have just one items: the client info metadata.
- assert len(metadata) == 1
-
-
-@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout_and_compression(unused_sleep):
- method = mock.Mock(
- spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
- )
- default_retry = retry.Retry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = grpc.Compression.Gzip
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = wrapped_method()
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=60, compression=default_compression, metadata=mock.ANY
- )
-
-
-@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
- method = mock.Mock(
- spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
- )
- default_retry = retry.Retry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = grpc.Compression.Gzip
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = wrapped_method(
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- compression=google.api_core.gapic_v1.method.DEFAULT,
- )
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=60, compression=default_compression, metadata=mock.ANY
- )
-
-
-@mock.patch("time.sleep")
-def test_wrap_method_with_overriding_retry_timeout_compression(unused_sleep):
- method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42])
- default_retry = retry.Retry()
- default_timeout = timeout.ConstantTimeout(60)
- default_compression = grpc.Compression.Gzip
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout, default_compression
- )
-
- result = wrapped_method(
- retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)),
- timeout=timeout.ConstantTimeout(22),
- compression=grpc.Compression.Deflate,
- )
-
- assert result == 42
- assert method.call_count == 2
- method.assert_called_with(
- timeout=22,
- compression=grpc.Compression.Deflate,
- metadata=mock.ANY,
- )
-
-
-@pytest.mark.skip(reason="Known flaky due to floating point comparison. #866")
-def test_wrap_method_with_overriding_timeout_as_a_number():
- method = mock.Mock(spec=["__call__"], return_value=42)
- default_retry = retry.Retry()
- default_timeout = timeout.ConstantTimeout(60)
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
- )
-
- # Using "result = wrapped_method(timeout=22)" fails since wrapped_method
- # does floating point calculations that results in 21.987.. instead of 22
- result = wrapped_method(timeout=22)
-
- assert result == 42
-
- actual_timeout = method.call_args[1]["timeout"]
- metadata = method.call_args[1]["metadata"]
- assert metadata == mock.ANY
- assert actual_timeout == pytest.approx(22, abs=0.01)
-
-
-def test_wrap_method_with_overriding_constant_timeout():
- method = mock.Mock(spec=["__call__"], return_value=42)
- default_retry = retry.Retry()
- default_timeout = timeout.ConstantTimeout(60)
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
- )
-
- result = wrapped_method(timeout=timeout.ConstantTimeout(22))
-
- assert result == 42
-
- actual_timeout = method.call_args[1]["timeout"]
- metadata = method.call_args[1]["metadata"]
- assert metadata == mock.ANY
- assert actual_timeout == 22
-
-
-def test_wrap_method_with_call():
- method = mock.Mock()
- mock_call = mock.Mock()
- method.with_call.return_value = 42, mock_call
-
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
- result = wrapped_method()
- assert len(result) == 2
- assert result[0] == 42
- assert result[1] == mock_call
-
-
-def test_wrap_method_with_call_not_supported():
- """Raises an error if wrapped callable doesn't have with_call method."""
- method = lambda: None # noqa: E731
-
- with pytest.raises(ValueError) as exc_info:
- google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
- assert "with_call=True is only supported for unary calls" in str(exc_info.value)
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
deleted file mode 100644
index f0ec82e..0000000
--- a/tests/unit/gapic/test_routing_header.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError:
- pytest.skip("No GRPC", allow_module_level=True)
-
-
-from google.api_core.gapic_v1 import routing_header
-
-
-def test_to_routing_header():
- params = [("name", "meep"), ("book.read", "1")]
- value = routing_header.to_routing_header(params)
- assert value == "name=meep&book.read=1"
-
-
-def test_to_routing_header_with_slashes():
- params = [("name", "me/ep"), ("book.read", "1&2")]
- value = routing_header.to_routing_header(params)
- assert value == "name=me/ep&book.read=1%262"
-
-
-def test_enum_fully_qualified():
- class Message:
- class Color(Enum):
- RED = 1
- GREEN = 2
- BLUE = 3
-
- params = [("color", Message.Color.RED)]
- value = routing_header.to_routing_header(params)
- assert value == "color=Color.RED"
- value = routing_header.to_routing_header(params, qualified_enums=True)
- assert value == "color=Color.RED"
-
-
-def test_enum_nonqualified():
- class Message:
- class Color(Enum):
- RED = 1
- GREEN = 2
- BLUE = 3
-
- params = [("color", Message.Color.RED), ("num", 5)]
- value = routing_header.to_routing_header(params, qualified_enums=False)
- assert value == "color=RED&num=5"
- params = {"color": Message.Color.RED, "num": 5}
- value = routing_header.to_routing_header(params, qualified_enums=False)
- assert value == "color=RED&num=5"
-
-
-def test_to_grpc_metadata():
- params = [("name", "meep"), ("book.read", "1")]
- metadata = routing_header.to_grpc_metadata(params)
- assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
-
-
-@pytest.mark.parametrize(
- "key,value,expected",
- [
- ("book.read", "1", "book.read=1"),
- ("name", "me/ep", "name=me/ep"),
- ("\\", "=", "%5C=%3D"),
- (b"hello", "world", "hello=world"),
- ("✔️", "✌️", "%E2%9C%94%EF%B8%8F=%E2%9C%8C%EF%B8%8F"),
- ],
-)
-def test__urlencode_param(key, value, expected):
- result = routing_header._urlencode_param(key, value)
- assert result == expected
-
-
-def test__urlencode_param_caching_performance():
- import time
-
- key = "key" * 10000
- value = "value" * 10000
- # time with empty cache
- start_time = time.perf_counter()
- routing_header._urlencode_param(key, value)
- duration = time.perf_counter() - start_time
- second_start_time = time.perf_counter()
- routing_header._urlencode_param(key, value)
- second_duration = time.perf_counter() - second_start_time
- # second call should be approximately 10 times faster
- assert second_duration < duration / 10
diff --git a/tests/unit/operations_v1/__init__.py b/tests/unit/operations_v1/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/unit/operations_v1/__init__.py
+++ /dev/null
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
deleted file mode 100644
index fb4b14f..0000000
--- a/tests/unit/operations_v1/test_operations_client.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import page_iterator
-from google.api_core.operations_v1 import operations_client_config
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-
-
-def test_get_operation():
- channel = grpc_helpers.ChannelStub()
- client = operations_v1.OperationsClient(channel)
- channel.GetOperation.response = operations_pb2.Operation(name="meep")
-
- response = client.get_operation("name", metadata=[("header", "foo")])
-
- assert ("header", "foo") in channel.GetOperation.calls[0].metadata
- assert ("x-goog-request-params", "name=name") in channel.GetOperation.calls[
- 0
- ].metadata
- assert len(channel.GetOperation.requests) == 1
- assert channel.GetOperation.requests[0].name == "name"
- assert response == channel.GetOperation.response
-
-
-def test_list_operations():
- channel = grpc_helpers.ChannelStub()
- client = operations_v1.OperationsClient(channel)
- operations = [
- operations_pb2.Operation(name="1"),
- operations_pb2.Operation(name="2"),
- ]
- list_response = operations_pb2.ListOperationsResponse(operations=operations)
- channel.ListOperations.response = list_response
-
- response = client.list_operations("name", "filter", metadata=[("header", "foo")])
-
- assert isinstance(response, page_iterator.Iterator)
- assert list(response) == operations
-
- assert ("header", "foo") in channel.ListOperations.calls[0].metadata
- assert ("x-goog-request-params", "name=name") in channel.ListOperations.calls[
- 0
- ].metadata
- assert len(channel.ListOperations.requests) == 1
- request = channel.ListOperations.requests[0]
- assert isinstance(request, operations_pb2.ListOperationsRequest)
- assert request.name == "name"
- assert request.filter == "filter"
-
-
-def test_delete_operation():
- channel = grpc_helpers.ChannelStub()
- client = operations_v1.OperationsClient(channel)
- channel.DeleteOperation.response = empty_pb2.Empty()
-
- client.delete_operation("name", metadata=[("header", "foo")])
-
- assert ("header", "foo") in channel.DeleteOperation.calls[0].metadata
- assert ("x-goog-request-params", "name=name") in channel.DeleteOperation.calls[
- 0
- ].metadata
- assert len(channel.DeleteOperation.requests) == 1
- assert channel.DeleteOperation.requests[0].name == "name"
-
-
-def test_cancel_operation():
- channel = grpc_helpers.ChannelStub()
- client = operations_v1.OperationsClient(channel)
- channel.CancelOperation.response = empty_pb2.Empty()
-
- client.cancel_operation("name", metadata=[("header", "foo")])
-
- assert ("header", "foo") in channel.CancelOperation.calls[0].metadata
- assert ("x-goog-request-params", "name=name") in channel.CancelOperation.calls[
- 0
- ].metadata
- assert len(channel.CancelOperation.requests) == 1
- assert channel.CancelOperation.requests[0].name == "name"
-
-
-def test_operations_client_config():
- assert operations_client_config.config["interfaces"]
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
deleted file mode 100644
index a3189cf..0000000
--- a/tests/unit/operations_v1/test_operations_rest_client.py
+++ /dev/null
@@ -1,1464 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-import pytest
-from typing import Any, List
-from ...helpers import warn_deprecated_credentials_file
-
-try:
- import grpc # noqa: F401
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-from requests import Response # noqa I201
-from google.auth.transport.requests import AuthorizedSession
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import parse_version_to_tuple
-from google.api_core.operations_v1 import AbstractOperationsClient
-
-import google.auth
-from google.api_core.operations_v1 import pagers
-from google.api_core.operations_v1 import pagers_async
-from google.api_core.operations_v1 import transports
-from google.auth import credentials as ga_credentials
-from google.auth import __version__ as auth_version
-from google.auth.exceptions import MutualTLSChannelError
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import json_format # type: ignore
-from google.rpc import status_pb2 # type: ignore
-
-try:
- import aiohttp # noqa: F401
- import google.auth.aio.transport
- from google.auth.aio.transport.sessions import AsyncAuthorizedSession
- from google.api_core.operations_v1 import AsyncOperationsRestClient
- from google.auth.aio import credentials as ga_credentials_async
-
- GOOGLE_AUTH_AIO_INSTALLED = True
-except ImportError:
- GOOGLE_AUTH_AIO_INSTALLED = False
-
-HTTP_OPTIONS = {
- "google.longrunning.Operations.CancelOperation": [
- {"method": "post", "uri": "/v3/{name=operations/*}:cancel", "body": "*"},
- ],
- "google.longrunning.Operations.DeleteOperation": [
- {"method": "delete", "uri": "/v3/{name=operations/*}"},
- ],
- "google.longrunning.Operations.GetOperation": [
- {"method": "get", "uri": "/v3/{name=operations/*}"},
- ],
- "google.longrunning.Operations.ListOperations": [
- {"method": "get", "uri": "/v3/{name=operations}"},
- ],
-}
-
-PYPARAM_CLIENT: List[Any] = [
- AbstractOperationsClient,
-]
-PYPARAM_CLIENT_TRANSPORT_NAME = [
- [AbstractOperationsClient, transports.OperationsRestTransport, "rest"],
-]
-PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [
- [
- AbstractOperationsClient,
- transports.OperationsRestTransport,
- ga_credentials.AnonymousCredentials(),
- ],
-]
-
-if GOOGLE_AUTH_AIO_INSTALLED:
- PYPARAM_CLIENT.append(AsyncOperationsRestClient)
- PYPARAM_CLIENT_TRANSPORT_NAME.append(
- [
- AsyncOperationsRestClient,
- transports.AsyncOperationsRestTransport,
- "rest_asyncio",
- ]
- )
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append(
- [
- AsyncOperationsRestClient,
- transports.AsyncOperationsRestTransport,
- ga_credentials_async.AnonymousCredentials(),
- ]
- )
-
-
-def client_cert_source_callback():
- return b"cert bytes", b"key bytes"
-
-
-def _get_session_type(is_async: bool):
- return (
- AsyncAuthorizedSession
- if is_async and GOOGLE_AUTH_AIO_INSTALLED
- else AuthorizedSession
- )
-
-
-def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS):
- if is_async and GOOGLE_AUTH_AIO_INSTALLED:
- async_transport = transports.rest_asyncio.AsyncOperationsRestTransport(
- credentials=ga_credentials_async.AnonymousCredentials(),
- http_options=http_options,
- )
- return AsyncOperationsRestClient(transport=async_transport)
- else:
- sync_transport = transports.rest.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
- )
- return AbstractOperationsClient(transport=sync_transport)
-
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint(client):
- return (
- "foo.googleapis.com"
- if ("localhost" in client.DEFAULT_ENDPOINT)
- else client.DEFAULT_ENDPOINT
- )
-
-
-# TODO: Add support for mtls in async rest
-@pytest.mark.parametrize(
- "client_class",
- [
- AbstractOperationsClient,
- ],
-)
-def test__get_default_mtls_endpoint(client_class):
- api_endpoint = "example.googleapis.com"
- api_mtls_endpoint = "example.mtls.googleapis.com"
- sandbox_endpoint = "example.sandbox.googleapis.com"
- sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
- non_googleapi = "api.example.com"
-
- assert client_class._get_default_mtls_endpoint(None) is None
- assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
- assert (
- client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
- )
- assert (
- client_class._get_default_mtls_endpoint(sandbox_endpoint)
- == sandbox_mtls_endpoint
- )
- assert (
- client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint)
- == sandbox_mtls_endpoint
- )
- assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_operations_client_from_service_account_info(client_class):
- creds = ga_credentials.AnonymousCredentials()
- if "async" in str(client_class):
- # TODO(): Add support for service account info to async REST transport.
- with pytest.raises(NotImplementedError):
- info = {"valid": True}
- client_class.from_service_account_info(info)
- else:
- with mock.patch.object(
- service_account.Credentials, "from_service_account_info"
- ) as factory:
- factory.return_value = creds
- info = {"valid": True}
- client = client_class.from_service_account_info(info)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == "https://longrunning.googleapis.com"
-
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.OperationsRestTransport,
- # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for
- # service account credentials in transports.AsyncOperationsRestTransport
- ],
-)
-def test_operations_client_service_account_always_use_jwt(transport_class):
- with mock.patch.object(
- service_account.Credentials, "with_always_use_jwt_access", create=True
- ) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport_class(credentials=creds, always_use_jwt_access=True)
- use_jwt.assert_called_once_with(True)
-
- with mock.patch.object(
- service_account.Credentials, "with_always_use_jwt_access", create=True
- ) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport_class(credentials=creds, always_use_jwt_access=False)
- use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_operations_client_from_service_account_file(client_class):
- if "async" in str(client_class):
- # TODO(): Add support for service account creds to async REST transport.
- with pytest.raises(NotImplementedError):
- client_class.from_service_account_file("dummy/file/path.json")
- else:
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = client_class.from_service_account_file("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- client = client_class.from_service_account_json("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == "https://longrunning.googleapis.com"
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
- PYPARAM_CLIENT_TRANSPORT_NAME,
-)
-def test_operations_client_get_transport_class(
- client_class, transport_class, transport_name
-):
- transport = client_class.get_transport_class()
- available_transports = [
- transports.OperationsRestTransport,
- ]
- if GOOGLE_AUTH_AIO_INSTALLED:
- available_transports.append(transports.AsyncOperationsRestTransport)
- assert transport in available_transports
-
- transport = client_class.get_transport_class(transport_name)
- assert transport == transport_class
-
-
-# TODO(): Update this test case to include async REST once we have support for MTLS.
-@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
- [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
-)
-@mock.patch.object(
- AbstractOperationsClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(AbstractOperationsClient),
-)
-def test_operations_client_client_options(
- client_class, transport_class, transport_name
-):
- # # Check that if channel is provided we won't create a new one.
- # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
- # client = client_class(transport=transport_class())
- # gtc.assert_not_called()
-
- # # Check that if channel is provided via str we will create a new one.
- # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
- # client = client_class(transport=transport_name)
- # gtc.assert_called()
-
- # Check the case api_endpoint is provided.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host="squid.clam.whelk",
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
- ):
- # Test behavior for google.auth versions < 2.43.0.
- # These versions do not have the updated mtls.should_use_client_cert logic.
- # Verify that a ValueError is raised when GOOGLE_API_USE_CLIENT_CERTIFICATE
- # is set to an unsupported value, as expected in these older versions.
- if parse_version_to_tuple(auth_version) < (2, 43, 0):
- with pytest.raises(ValueError):
- client = client_class()
- # Test behavior for google.auth versions >= 2.43.0.
- # In these versions, if GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an
- # unsupported value (e.g., not 'true' or 'false'), the expected behavior
- # of the internal google.auth.mtls.should_use_client_cert() function
- # is to return False. Expect should_use_client_cert to return False, so
- # client creation should proceed without requiring a client certificate.
- else:
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id="octopus",
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case credentials_file is provided
- with warn_deprecated_credentials_file():
- options = client_options.ClientOptions(credentials_file="credentials.json")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
-
-# TODO: Add support for mtls in async REST
-@pytest.mark.parametrize(
- "client_class,transport_class,transport_name,use_client_cert_env",
- [
- (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "true"),
- (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "false"),
- ],
-)
-@mock.patch.object(
- AbstractOperationsClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(AbstractOperationsClient),
-)
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_operations_client_mtls_env_auto(
- client_class, transport_class, transport_name, use_client_cert_env
-):
- # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
- # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
- # Check the case client_cert_source is provided. Whether client cert is used depends on
- # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- options = client_options.ClientOptions(
- client_cert_source=client_cert_source_callback
- )
-
- def fake_init(client_cert_source_for_mtls=None, **kwargs):
- """Invoke client_cert source if provided."""
-
- if client_cert_source_for_mtls:
- client_cert_source_for_mtls()
- return None
-
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.side_effect = fake_init
- client = client_class(client_options=options)
-
- if use_client_cert_env == "false":
- expected_client_cert_source = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_client_cert_source = client_cert_source_callback
- expected_host = client.DEFAULT_MTLS_ENDPOINT
-
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- client_cert_source_for_mtls=expected_client_cert_source,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case ADC client cert is provided. Whether client cert is used depends on
- # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
- ):
- with mock.patch(
- "google.auth.transport.mtls.default_client_cert_source",
- return_value=client_cert_source_callback,
- ):
- if use_client_cert_env == "false":
- expected_host = client.DEFAULT_ENDPOINT
- expected_client_cert_source = None
- else:
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_client_cert_source = client_cert_source_callback
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- client_cert_source_for_mtls=expected_client_cert_source,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
- ):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
- PYPARAM_CLIENT_TRANSPORT_NAME,
-)
-def test_operations_client_client_options_scopes(
- client_class, transport_class, transport_name
-):
- # Check the case scopes are provided.
- options = client_options.ClientOptions(
- scopes=["1", "2"],
- )
- if "async" in str(client_class):
- # TODO(): Add support for scopes to async REST transport.
- with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
- client_class(client_options=options, transport=transport_name)
- else:
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=["1", "2"],
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
- PYPARAM_CLIENT_TRANSPORT_NAME,
-)
-def test_operations_client_client_options_credentials_file(
- client_class, transport_class, transport_name
-):
- # Check the case credentials file is provided.
- with warn_deprecated_credentials_file():
- options = client_options.ClientOptions(credentials_file="credentials.json")
- if "async" in str(client_class):
- # TODO(): Add support for credentials file to async REST transport.
- with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
- with warn_deprecated_credentials_file():
- client_class(client_options=options, transport=transport_name)
- else:
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
-
-@pytest.mark.parametrize(
- "credentials_file",
- [None, "credentials.json"],
-)
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_list_operations_rest(google_auth_default, credentials_file):
- if credentials_file:
- with warn_deprecated_credentials_file():
- sync_transport = transports.rest.OperationsRestTransport(
- credentials_file=credentials_file,
- http_options=HTTP_OPTIONS,
- )
- else:
- # no warning expected
- sync_transport = transports.rest.OperationsRestTransport(
- credentials_file=credentials_file,
- http_options=HTTP_OPTIONS,
- )
-
- client = AbstractOperationsClient(transport=sync_transport)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.ListOperationsResponse(
- next_page_token="next_page_token_value",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.list_operations(
- name="operations", filter_="my_filter", page_size=10, page_token="abc"
- )
-
- actual_args = req.call_args
- assert actual_args.args[0] == "GET"
- assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
- assert actual_args.kwargs["params"] == [
- ("filter", "my_filter"),
- ("pageSize", 10),
- ("pageToken", "abc"),
- ]
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListOperationsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-@pytest.mark.asyncio
-async def test_list_operations_rest_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
-
- client = _get_operations_client(is_async=True)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.ListOperationsResponse(
- next_page_token="next_page_token_value",
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.read = mock.AsyncMock(
- return_value=json_return_value.encode("UTF-8")
- )
- req.return_value = response_value
- response = await client.list_operations(
- name="operations", filter_="my_filter", page_size=10, page_token="abc"
- )
-
- actual_args = req.call_args
- assert actual_args.args[0] == "GET"
- assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
- assert actual_args.kwargs["params"] == [
- ("filter", "my_filter"),
- ("pageSize", 10),
- ("pageToken", "abc"),
- ]
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers_async.ListOperationsAsyncPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_list_operations_rest_failure():
- client = _get_operations_client(is_async=False, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- response_value = Response()
- response_value.status_code = 400
- mock_request = mock.MagicMock()
- mock_request.method = "GET"
- mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- client.list_operations(name="operations")
-
-
-@pytest.mark.asyncio
-async def test_list_operations_rest_failure_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
-
- client = _get_operations_client(is_async=True, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- response_value = mock.Mock()
- response_value.status_code = 400
- response_value.read = mock.AsyncMock(return_value=b"{}")
- mock_request = mock.MagicMock()
- mock_request.method = "GET"
- mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- await client.list_operations(name="operations")
-
-
-def test_list_operations_rest_pager():
- client = _get_operations_client(is_async=False, http_options=None)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- # TODO(kbandes): remove this mock unless there's a good reason for it.
- # with mock.patch.object(path_template, 'transcode') as transcode:
- # Set the response as a series of pages
- response = (
- operations_pb2.ListOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token="abc",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[],
- next_page_token="def",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[operations_pb2.Operation()],
- next_page_token="ghi",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[operations_pb2.Operation(), operations_pb2.Operation()],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(json_format.MessageToJson(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode("UTF-8")
- return_val.status_code = 200
- req.side_effect = return_values
-
- pager = client.list_operations(name="operations")
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation) for i in results)
-
- pages = list(client.list_operations(name="operations").pages)
- for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page_.next_page_token == token
-
-
-@pytest.mark.asyncio
-async def test_list_operations_rest_pager_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True, http_options=None)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- # TODO(kbandes): remove this mock unless there's a good reason for it.
- # with mock.patch.object(path_template, 'transcode') as transcode:
- # Set the response as a series of pages
- response = (
- operations_pb2.ListOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token="abc",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[],
- next_page_token="def",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[operations_pb2.Operation()],
- next_page_token="ghi",
- ),
- operations_pb2.ListOperationsResponse(
- operations=[operations_pb2.Operation(), operations_pb2.Operation()],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(json_format.MessageToJson(x) for x in response)
- return_values = tuple(mock.Mock() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8"))
- return_val.status_code = 200
- req.side_effect = return_values
-
- pager = await client.list_operations(name="operations")
-
- responses = []
- async for response in pager:
- responses.append(response)
-
- results = list(responses)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation) for i in results)
- pager = await client.list_operations(name="operations")
-
- responses = []
- async for response in pager:
- responses.append(response)
-
- assert len(responses) == 6
- assert all(isinstance(i, operations_pb2.Operation) for i in results)
-
- pages = []
-
- async for page in pager.pages:
- pages.append(page)
- for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]):
- assert page_.next_page_token == token
-
-
-def test_get_operation_rest():
- client = _get_operations_client(is_async=False)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(
- name="operations/sample1",
- done=True,
- error=status_pb2.Status(code=411),
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.get_operation("operations/sample1")
-
- actual_args = req.call_args
- assert actual_args.args[0] == "GET"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1"
- )
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.Operation)
- assert response.name == "operations/sample1"
- assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_get_operation_rest_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(
- name="operations/sample1",
- done=True,
- error=status_pb2.Status(code=411),
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.read = mock.AsyncMock(return_value=json_return_value)
- req.return_value = response_value
- response = await client.get_operation("operations/sample1")
-
- actual_args = req.call_args
- assert actual_args.args[0] == "GET"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1"
- )
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, operations_pb2.Operation)
- assert response.name == "operations/sample1"
- assert response.done is True
-
-
-def test_get_operation_rest_failure():
- client = _get_operations_client(is_async=False, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- response_value = Response()
- response_value.status_code = 400
- mock_request = mock.MagicMock()
- mock_request.method = "GET"
- mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- client.get_operation("sample0/operations/sample1")
-
-
-@pytest.mark.asyncio
-async def test_get_operation_rest_failure_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- response_value = mock.Mock()
- response_value.status_code = 400
- response_value.read = mock.AsyncMock(return_value=b"{}")
- mock_request = mock.MagicMock()
- mock_request.method = "GET"
- mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- await client.get_operation("sample0/operations/sample1")
-
-
-def test_delete_operation_rest():
- client = _get_operations_client(is_async=False)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ""
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- client.delete_operation(name="operations/sample1")
- assert req.call_count == 1
- actual_args = req.call_args
- assert actual_args.args[0] == "DELETE"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1"
- )
-
-
-@pytest.mark.asyncio
-async def test_delete_operation_rest_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = ""
- response_value.read = mock.AsyncMock(
- return_value=json_return_value.encode("UTF-8")
- )
- req.return_value = response_value
- await client.delete_operation(name="operations/sample1")
- assert req.call_count == 1
- actual_args = req.call_args
- assert actual_args.args[0] == "DELETE"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1"
- )
-
-
-def test_delete_operation_rest_failure():
- client = _get_operations_client(is_async=False, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- response_value = Response()
- response_value.status_code = 400
- mock_request = mock.MagicMock()
- mock_request.method = "DELETE"
- mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- client.delete_operation(name="sample0/operations/sample1")
-
-
-@pytest.mark.asyncio
-async def test_delete_operation_rest_failure_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- response_value = mock.Mock()
- response_value.status_code = 400
- response_value.read = mock.AsyncMock(return_value=b"{}")
- mock_request = mock.MagicMock()
- mock_request.method = "DELETE"
- mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- await client.delete_operation(name="sample0/operations/sample1")
-
-
-def test_cancel_operation_rest():
- client = _get_operations_client(is_async=False)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ""
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- client.cancel_operation(name="operations/sample1")
- assert req.call_count == 1
- actual_args = req.call_args
- assert actual_args.args[0] == "POST"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
- )
-
-
-@pytest.mark.asyncio
-async def test_cancel_operation_rest_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = ""
- response_value.read = mock.AsyncMock(
- return_value=json_return_value.encode("UTF-8")
- )
- req.return_value = response_value
- await client.cancel_operation(name="operations/sample1")
- assert req.call_count == 1
- actual_args = req.call_args
- assert actual_args.args[0] == "POST"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
- )
-
-
-def test_cancel_operation_rest_failure():
- client = _get_operations_client(is_async=False, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=False), "request") as req:
- response_value = Response()
- response_value.status_code = 400
- mock_request = mock.MagicMock()
- mock_request.method = "POST"
- mock_request.url = (
- "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
- )
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- client.cancel_operation(name="sample0/operations/sample1")
-
-
-@pytest.mark.asyncio
-async def test_cancel_operation_rest_failure_async():
- if not GOOGLE_AUTH_AIO_INSTALLED:
- pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- client = _get_operations_client(is_async=True, http_options=None)
-
- with mock.patch.object(_get_session_type(is_async=True), "request") as req:
- response_value = mock.Mock()
- response_value.status_code = 400
- response_value.read = mock.AsyncMock(return_value=b"{}")
- mock_request = mock.MagicMock()
- mock_request.method = "POST"
- mock_request.url = (
- "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
- )
- response_value.request = mock_request
- req.return_value = response_value
- with pytest.raises(core_exceptions.GoogleAPIError):
- await client.cancel_operation(name="sample0/operations/sample1")
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_credentials_transport_error(client_class, transport_class, credentials):
- # It is an error to provide credentials and a transport instance.
- transport = transport_class(credentials=credentials)
- with pytest.raises(ValueError):
- client_class(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
- transport = transport_class(credentials=credentials)
- with pytest.raises(ValueError):
- client_class(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
-
- # It is an error to provide scopes and a transport instance.
- transport = transport_class(credentials=credentials)
- with pytest.raises(ValueError):
- client_class(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
- )
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_transport_instance(client_class, transport_class, credentials):
- # A client may be instantiated with a custom transport instance.
- transport = transport_class(
- credentials=credentials,
- )
- client = client_class(transport=transport)
- assert client.transport is transport
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_transport_adc(client_class, transport_class, credentials):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (credentials, None)
- transport_class()
- adc.assert_called_once()
-
-
-def test_operations_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- with warn_deprecated_credentials_file():
- transports.OperationsTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json",
- )
-
-
-def test_operations_base_transport():
- # Instantiate the base transport.
- with mock.patch(
- "google.api_core.operations_v1.transports.OperationsTransport.__init__"
- ) as Transport:
- Transport.return_value = None
- transport = transports.OperationsTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- "list_operations",
- "get_operation",
- "delete_operation",
- "cancel_operation",
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
- with pytest.raises(NotImplementedError):
- transport.close()
-
-
-def test_operations_base_transport_with_credentials_file():
- # Instantiate the base transport with a credentials file
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch(
- "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
- ) as Transport:
- Transport.return_value = None
- load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
- with warn_deprecated_credentials_file():
- transports.OperationsTransport(
- credentials_file="credentials.json",
- quota_project_id="octopus",
- )
- load_creds.assert_called_once_with(
- "credentials.json",
- scopes=None,
- default_scopes=(),
- quota_project_id="octopus",
- )
-
-
-def test_operations_base_transport_with_adc():
- # Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
- "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
- ) as Transport:
- Transport.return_value = None
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transports.OperationsTransport()
- adc.assert_called_once()
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_operations_auth_adc(client_class):
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-
- if "async" in str(client_class).lower():
- # TODO(): Add support for adc to async REST transport.
- # NOTE: Ideally, the logic for adc shouldn't be called if transport
- # is set to async REST. If the user does not configure credentials
- # of type `google.auth.aio.credentials.Credentials`,
- # we should raise an exception to avoid the adc workflow.
- with pytest.raises(google.auth.exceptions.InvalidType):
- client_class()
- else:
- client_class()
- adc.assert_called_once_with(
- scopes=None,
- default_scopes=(),
- quota_project_id=None,
- )
-
-
-# TODO(https://github.com/googleapis/python-api-core/issues/705): Add
-# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported
-# in `google.auth.aio.transport`.
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.OperationsRestTransport,
- ],
-)
-def test_operations_http_transport_client_cert_source_for_mtls(transport_class):
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch(
- "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
- ) as mock_configure_mtls_channel:
- transport_class(
- credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_operations_host_no_port(client_class, transport_class, credentials):
- client = client_class(
- credentials=credentials,
- client_options=client_options.ClientOptions(
- api_endpoint="longrunning.googleapis.com"
- ),
- )
- assert client.transport._host == "https://longrunning.googleapis.com"
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_operations_host_with_port(client_class, transport_class, credentials):
- client = client_class(
- credentials=credentials,
- client_options=client_options.ClientOptions(
- api_endpoint="longrunning.googleapis.com:8000"
- ),
- )
- assert client.transport._host == "https://longrunning.googleapis.com:8000"
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_common_billing_account_path(client_class):
- billing_account = "squid"
- expected = "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
- actual = client_class.common_billing_account_path(billing_account)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_parse_common_billing_account_path(client_class):
- expected = {
- "billing_account": "clam",
- }
- path = client_class.common_billing_account_path(**expected)
-
- # Check that the path construction is reversible.
- actual = client_class.parse_common_billing_account_path(path)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_common_folder_path(client_class):
- folder = "whelk"
- expected = "folders/{folder}".format(
- folder=folder,
- )
- actual = client_class.common_folder_path(folder)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_parse_common_folder_path(client_class):
- expected = {
- "folder": "octopus",
- }
- path = client_class.common_folder_path(**expected)
-
- # Check that the path construction is reversible.
- actual = client_class.parse_common_folder_path(path)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_common_organization_path(client_class):
- organization = "oyster"
- expected = "organizations/{organization}".format(
- organization=organization,
- )
- actual = client_class.common_organization_path(organization)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_parse_common_organization_path(client_class):
- expected = {
- "organization": "nudibranch",
- }
- path = client_class.common_organization_path(**expected)
-
- # Check that the path construction is reversible.
- actual = client_class.parse_common_organization_path(path)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_common_project_path(client_class):
- project = "cuttlefish"
- expected = "projects/{project}".format(
- project=project,
- )
- actual = client_class.common_project_path(project)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_parse_common_project_path(client_class):
- expected = {
- "project": "mussel",
- }
- path = client_class.common_project_path(**expected)
-
- # Check that the path construction is reversible.
- actual = client_class.parse_common_project_path(path)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_common_location_path(client_class):
- project = "winkle"
- location = "nautilus"
- expected = "projects/{project}/locations/{location}".format(
- project=project,
- location=location,
- )
- actual = client_class.common_location_path(project, location)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class",
- PYPARAM_CLIENT,
-)
-def test_parse_common_location_path(client_class):
- expected = {
- "project": "scallop",
- "location": "abalone",
- }
- path = client_class.common_location_path(**expected)
-
- # Check that the path construction is reversible.
- actual = client_class.parse_common_location_path(path)
- assert expected == actual
-
-
-@pytest.mark.parametrize(
- "client_class,transport_class,credentials",
- PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
-)
-def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials):
- client_info = gapic_v1.client_info.ClientInfo()
- with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
- client_class(
- credentials=credentials,
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
-
- with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
- transport_class(
- credentials=credentials,
- client_info=client_info,
- )
- prep.assert_called_once_with(client_info)
diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/unit/retry/__init__.py
+++ /dev/null
diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py
deleted file mode 100644
index 78fdb91..0000000
--- a/tests/unit/retry/test_retry_base.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-import re
-from unittest import mock
-
-import pytest
-import requests.exceptions
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.auth import exceptions as auth_exceptions
-
-
-def test_if_exception_type():
- predicate = retry.if_exception_type(ValueError)
-
- assert predicate(ValueError())
- assert not predicate(TypeError())
-
-
-def test_if_exception_type_multiple():
- predicate = retry.if_exception_type(ValueError, TypeError)
-
- assert predicate(ValueError())
- assert predicate(TypeError())
- assert not predicate(RuntimeError())
-
-
-def test_if_transient_error():
- assert retry.if_transient_error(exceptions.InternalServerError(""))
- assert retry.if_transient_error(exceptions.TooManyRequests(""))
- assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
- assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
- assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
- assert retry.if_transient_error(auth_exceptions.TransportError(""))
- assert not retry.if_transient_error(exceptions.InvalidArgument(""))
-
-
-# Make uniform return half of its maximum, which will be the calculated
-# sleep time.
-@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
-def test_exponential_sleep_generator_base_2(uniform):
- gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
-
- result = list(itertools.islice(gen, 8))
- assert result == [1, 2, 4, 8, 16, 32, 60, 60]
-
-
-def test_build_retry_error_empty_list():
- """
- attempt to build a retry error with no errors encountered
- should return a generic RetryError
- """
- from google.api_core.retry import build_retry_error
- from google.api_core.retry import RetryFailureReason
-
- reason = RetryFailureReason.NON_RETRYABLE_ERROR
- src, cause = build_retry_error([], reason, 10)
- assert isinstance(src, exceptions.RetryError)
- assert cause is None
- assert src.message == "Unknown error"
-
-
-def test_build_retry_error_preserves_cause():
- """
- build_retry_error should preserve __cause__ from chained exceptions.
- """
- from google.api_core.retry import build_retry_error
- from google.api_core.retry import RetryFailureReason
-
- # Create an exception with explicit cause
- cause = ValueError("root cause")
- exc = RuntimeError("wrapper")
- exc.__cause__ = cause
-
- src, found_cause = build_retry_error(
- [exc], RetryFailureReason.NON_RETRYABLE_ERROR, None
- )
-
- assert src is exc
- assert found_cause is cause
-
-
-def test_build_retry_error_timeout_message():
- """
- should provide helpful error message when timeout is reached
- """
- from google.api_core.retry import build_retry_error
- from google.api_core.retry import RetryFailureReason
-
- reason = RetryFailureReason.TIMEOUT
- cause = RuntimeError("timeout")
- src, found_cause = build_retry_error([ValueError(), cause], reason, 10)
- assert isinstance(src, exceptions.RetryError)
- assert src.message == "Timeout of 10.0s exceeded"
- # should attach appropriate cause
- assert found_cause is cause
-
-
-def test_build_retry_error_empty_timeout():
- """
- attempt to build a retry error when timeout is None
- should return a generic timeout error message
- """
- from google.api_core.retry import build_retry_error
- from google.api_core.retry import RetryFailureReason
-
- reason = RetryFailureReason.TIMEOUT
- src, _ = build_retry_error([], reason, None)
- assert isinstance(src, exceptions.RetryError)
- assert src.message == "Timeout exceeded"
-
-
-class Test_BaseRetry(object):
- def _make_one(self, *args, **kwargs):
- return retry.retry_base._BaseRetry(*args, **kwargs)
-
- def test_constructor_defaults(self):
- retry_ = self._make_one()
- assert retry_._predicate == retry.if_transient_error
- assert retry_._initial == 1
- assert retry_._maximum == 60
- assert retry_._multiplier == 2
- assert retry_._timeout == 120
- assert retry_._on_error is None
- assert retry_.timeout == 120
- assert retry_.timeout == 120
-
- def test_constructor_options(self):
- _some_function = mock.Mock()
-
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- timeout=4,
- on_error=_some_function,
- )
- assert retry_._predicate == mock.sentinel.predicate
- assert retry_._initial == 1
- assert retry_._maximum == 2
- assert retry_._multiplier == 3
- assert retry_._timeout == 4
- assert retry_._on_error is _some_function
-
- @pytest.mark.parametrize("use_deadline", [True, False])
- @pytest.mark.parametrize("value", [None, 0, 1, 4, 42, 5.5])
- def test_with_timeout(self, use_deadline, value):
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- timeout=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = (
- retry_.with_timeout(value)
- if not use_deadline
- else retry_.with_deadline(value)
- )
- assert retry_ is not new_retry
- assert new_retry._timeout == value
- assert (
- new_retry.timeout == value
- if not use_deadline
- else new_retry.deadline == value
- )
-
- # the rest of the attributes should remain the same
- assert new_retry._predicate is retry_._predicate
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_predicate(self):
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- timeout=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_predicate(mock.sentinel.predicate)
- assert retry_ is not new_retry
- assert new_retry._predicate == mock.sentinel.predicate
-
- # the rest of the attributes should remain the same
- assert new_retry._timeout == retry_._timeout
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_noop(self):
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- timeout=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay()
- assert retry_ is not new_retry
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
-
- @pytest.mark.parametrize(
- "originals,updated,expected",
- [
- [(1, 2, 3), (4, 5, 6), (4, 5, 6)],
- [(1, 2, 3), (0, 0, 0), (0, 0, 0)],
- [(1, 2, 3), (None, None, None), (1, 2, 3)],
- [(0, 0, 0), (None, None, None), (0, 0, 0)],
- [(1, 2, 3), (None, 0.5, None), (1, 0.5, 3)],
- [(1, 2, 3), (None, 0.5, 4), (1, 0.5, 4)],
- [(1, 2, 3), (9, None, None), (9, 2, 3)],
- ],
- )
- def test_with_delay(self, originals, updated, expected):
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=originals[0],
- maximum=originals[1],
- multiplier=originals[2],
- timeout=14,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(
- initial=updated[0], maximum=updated[1], multiplier=updated[2]
- )
- assert retry_ is not new_retry
- assert new_retry._initial == expected[0]
- assert new_retry._maximum == expected[1]
- assert new_retry._multiplier == expected[2]
-
- # the rest of the attributes should remain the same
- assert new_retry._timeout == retry_._timeout
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_partial_options(self):
- retry_ = self._make_one(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- timeout=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 4
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 3
-
- new_retry = retry_.with_delay(maximum=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 4
- assert new_retry._multiplier == 3
-
- new_retry = retry_.with_delay(multiplier=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 4
-
- # the rest of the attributes should remain the same
- assert new_retry._timeout == retry_._timeout
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = self._make_one(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- timeout=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<_BaseRetry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
diff --git a/tests/unit/retry/test_retry_imports.py b/tests/unit/retry/test_retry_imports.py
deleted file mode 100644
index 597909f..0000000
--- a/tests/unit/retry/test_retry_imports.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def test_legacy_imports_retry_unary_sync():
- # TODO: Delete this test when when we revert these imports on the
- # next major version release
- # (https://github.com/googleapis/python-api-core/issues/576)
- from google.api_core.retry import datetime_helpers # noqa: F401
- from google.api_core.retry import exceptions # noqa: F401
- from google.api_core.retry import auth_exceptions # noqa: F401
-
-
-def test_legacy_imports_retry_unary_async():
- # TODO: Delete this test when when we revert these imports on the
- # next major version release
- # (https://github.com/googleapis/python-api-core/issues/576)
- from google.api_core import retry_async # noqa: F401
-
- # See https://github.com/googleapis/python-api-core/issues/586
- # for context on why we need to test this import this explicitly.
- from google.api_core.retry_async import AsyncRetry # noqa: F401
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
deleted file mode 100644
index 2499b2a..0000000
--- a/tests/unit/retry/test_retry_streaming.py
+++ /dev/null
@@ -1,505 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-import pytest
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.api_core.retry import retry_streaming
-
-from .test_retry_base import Test_BaseRetry
-
-
-def test_retry_streaming_target_bad_sleep_generator():
- with pytest.raises(
- ValueError, match="Sleep generator stopped yielding sleep values"
- ):
- next(retry_streaming.retry_target_stream(None, lambda x: True, [], None))
-
-
-@mock.patch("time.sleep", autospec=True)
-def test_retry_streaming_target_dynamic_backoff(sleep):
- """
- sleep_generator should be iterated after on_error, to support dynamic backoff
- """
- from functools import partial
-
- sleep.side_effect = RuntimeError("stop after sleep")
- # start with empty sleep generator; values are added after exception in push_sleep_value
- sleep_values = []
- error_target = partial(TestStreamingRetry._generator_mock, error_on=0)
- inserted_sleep = 99
-
- def push_sleep_value(err):
- sleep_values.append(inserted_sleep)
-
- with pytest.raises(RuntimeError):
- next(
- retry_streaming.retry_target_stream(
- error_target,
- predicate=lambda x: True,
- sleep_generator=sleep_values,
- on_error=push_sleep_value,
- )
- )
- assert sleep.call_count == 1
- sleep.assert_called_once_with(inserted_sleep)
-
-
-class TestStreamingRetry(Test_BaseRetry):
- def _make_one(self, *args, **kwargs):
- return retry_streaming.StreamingRetry(*args, **kwargs)
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = retry_streaming.StreamingRetry(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- timeout=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<StreamingRetry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
-
- @staticmethod
- def _generator_mock(
- num=5,
- error_on=None,
- return_val=None,
- exceptions_seen=None,
- ):
- """
- Helper to create a mock generator that yields a number of values
- Generator can optionally raise an exception on a specific iteration
-
- Args:
- - num (int): the number of values to yield. After this, the generator will return `return_val`
- - error_on (int): if given, the generator will raise a ValueError on the specified iteration
- - return_val (any): if given, the generator will return this value after yielding num values
- - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
- """
- try:
- for i in range(num):
- if error_on is not None and i == error_on:
- raise ValueError("generator mock error")
- yield i
- return return_val
- except (Exception, BaseException, GeneratorExit) as e:
- # keep track of exceptions seen by generator
- if exceptions_seen is not None:
- exceptions_seen.append(e)
- raise
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___success(self, sleep):
- """
- Test that a retry-decorated generator yields values as expected
- This test checks a generator with no issues
- """
- import types
- import collections
-
- retry_ = retry_streaming.StreamingRetry()
-
- decorated = retry_(self._generator_mock)
-
- num = 10
- result = decorated(num)
- # check types
- assert isinstance(decorated(num), collections.abc.Iterable)
- assert isinstance(decorated(num), types.GeneratorType)
- assert isinstance(self._generator_mock(num), collections.abc.Iterable)
- assert isinstance(self._generator_mock(num), types.GeneratorType)
- # check yield contents
- unpacked = [i for i in result]
- assert len(unpacked) == num
- for a, b in zip(unpacked, self._generator_mock(num)):
- assert a == b
- sleep.assert_not_called()
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___retry(self, sleep):
- """
- Tests that a retry-decorated generator will retry on errors
- """
- on_error = mock.Mock(return_value=None)
- retry_ = retry_streaming.StreamingRetry(
- on_error=on_error,
- predicate=retry.if_exception_type(ValueError),
- timeout=None,
- )
- result = retry_(self._generator_mock)(error_on=3)
- # error thrown on 3
- # generator should contain 0, 1, 2 looping
- unpacked = [next(result) for i in range(10)]
- assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
- assert on_error.call_count == 3
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("time.sleep", autospec=True)
- @pytest.mark.parametrize("use_deadline_arg", [True, False])
- def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg):
- """
- Tests that a retry-decorated generator will throw a RetryError
- after using the time budget
- """
- import time
-
- timeout_val = 30.9
- # support "deadline" as an alias for "timeout"
- timeout_kwarg = (
- {"timeout": timeout_val}
- if not use_deadline_arg
- else {"deadline": timeout_val}
- )
-
- on_error = mock.Mock(return_value=None)
- retry_ = retry_streaming.StreamingRetry(
- predicate=retry.if_exception_type(ValueError),
- initial=1.0,
- maximum=1024.0,
- multiplier=2.0,
- **timeout_kwarg,
- )
-
- timenow = time.monotonic()
- now_patcher = mock.patch(
- "time.monotonic",
- return_value=timenow,
- )
-
- decorated = retry_(self._generator_mock, on_error=on_error)
- generator = decorated(error_on=1)
- with now_patcher as patched_now:
- # Make sure that calls to fake time.sleep() also advance the mocked
- # time clock.
- def increase_time(sleep_delay):
- patched_now.return_value += sleep_delay
-
- sleep.side_effect = increase_time
- with pytest.raises(exceptions.RetryError):
- [i for i in generator]
-
- assert on_error.call_count == 5
- # check the delays
- assert sleep.call_count == 4 # once between each successive target calls
- last_wait = sleep.call_args.args[0]
- total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- assert last_wait == 8.0
- assert total_wait == 15.0
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_generator_send(self, sleep):
- """
- Send should be passed through retry into target generator
- """
-
- def _mock_send_gen():
- """
- always yield whatever was sent in
- """
- in_ = yield
- while True:
- in_ = yield in_
-
- retry_ = retry_streaming.StreamingRetry()
-
- decorated = retry_(_mock_send_gen)
-
- generator = decorated()
- result = next(generator)
- # first yield should be None
- assert result is None
- in_messages = ["test_1", "hello", "world"]
- out_messages = []
- for msg in in_messages:
- recv = generator.send(msg)
- out_messages.append(recv)
- assert in_messages == out_messages
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_generator_send_retry(self, sleep):
- """
- Send should support retries like next
- """
- on_error = mock.Mock(return_value=None)
- retry_ = retry_streaming.StreamingRetry(
- on_error=on_error,
- predicate=retry.if_exception_type(ValueError),
- timeout=None,
- )
- result = retry_(self._generator_mock)(error_on=3)
- with pytest.raises(TypeError) as exc_info:
- # calling first send with non-None input should raise a TypeError
- result.send("can not send to fresh generator")
- assert exc_info.match("can't send non-None value")
- # initiate iteration with None
- result = retry_(self._generator_mock)(error_on=3)
- assert result.send(None) == 0
- # error thrown on 3
- # generator should contain 0, 1, 2 looping
- unpacked = [result.send(i) for i in range(10)]
- assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
- assert on_error.call_count == 3
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_iterable_send(self, sleep):
- """
- send should raise attribute error if wrapped iterator does not support it
- """
- retry_ = retry_streaming.StreamingRetry()
-
- def iterable_fn(n):
- return iter(range(n))
-
- decorated = retry_(iterable_fn)
- generator = decorated(5)
- # initialize
- next(generator)
- # call send
- with pytest.raises(AttributeError):
- generator.send("test")
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_iterable_close(self, sleep):
- """
- close should be handled by wrapper if wrapped iterable does not support it
- """
- retry_ = retry_streaming.StreamingRetry()
-
- def iterable_fn(n):
- return iter(range(n))
-
- decorated = retry_(iterable_fn)
-
- # try closing active generator
- retryable = decorated(10)
- assert next(retryable) == 0
- retryable.close()
- with pytest.raises(StopIteration):
- next(retryable)
-
- # try closing a new generator
- retryable = decorated(10)
- retryable.close()
- with pytest.raises(StopIteration):
- next(retryable)
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_iterable_throw(self, sleep):
- """
- Throw should work even if the wrapped iterable does not support it
- """
- predicate = retry.if_exception_type(ValueError)
- retry_ = retry_streaming.StreamingRetry(predicate=predicate)
-
- def iterable_fn(n):
- return iter(range(n))
-
- decorated = retry_(iterable_fn)
-
- # try throwing with active generator
- retryable = decorated(10)
- assert next(retryable) == 0
- # should swallow errors in predicate
- retryable.throw(ValueError)
- assert next(retryable) == 1
- # should raise on other errors
- with pytest.raises(TypeError):
- retryable.throw(TypeError)
- with pytest.raises(StopIteration):
- next(retryable)
-
- # try throwing with a new generator
- retryable = decorated(10)
- with pytest.raises(ValueError):
- retryable.throw(ValueError)
- with pytest.raises(StopIteration):
- next(retryable)
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_generator_return(self, sleep):
- """
- Generator return value should be passed through retry decorator
- """
- retry_ = retry_streaming.StreamingRetry()
-
- decorated = retry_(self._generator_mock)
-
- expected_value = "done"
- generator = decorated(5, return_val=expected_value)
- found_value = None
- try:
- while True:
- next(generator)
- except StopIteration as e:
- found_value = e.value
- assert found_value == expected_value
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_generator_close(self, sleep):
- """
- Close should be passed through retry into target generator
- """
- retry_ = retry_streaming.StreamingRetry()
-
- decorated = retry_(self._generator_mock)
-
- exception_list = []
- generator = decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- next(generator)
- generator.close()
- assert isinstance(exception_list[0], GeneratorExit)
- with pytest.raises(StopIteration):
- # calling next on closed generator should raise error
- next(generator)
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___with_generator_throw(self, sleep):
- """
- Throw should be passed through retry into target generator
- """
- retry_ = retry_streaming.StreamingRetry(
- predicate=retry.if_exception_type(ValueError),
- )
- decorated = retry_(self._generator_mock)
-
- exception_list = []
- generator = decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- next(generator)
- with pytest.raises(BufferError):
- generator.throw(BufferError("test"))
- assert isinstance(exception_list[0], BufferError)
- with pytest.raises(StopIteration):
- # calling next on closed generator should raise error
- next(generator)
- # should retry if throw retryable exception
- exception_list = []
- generator = decorated(10, exceptions_seen=exception_list)
- for i in range(2):
- next(generator)
- val = generator.throw(ValueError("test"))
- assert val == 0
- assert isinstance(exception_list[0], ValueError)
- # calling next on closed generator should not raise error
- assert next(generator) == 1
-
- def test_exc_factory_non_retryable_error(self):
- """
- generator should give the option to override exception creation logic
- test when non-retryable error is thrown
- """
- from google.api_core.retry import RetryFailureReason
- from google.api_core.retry.retry_streaming import retry_target_stream
-
- timeout = None
- sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
- expected_final_err = RuntimeError("done")
- expected_source_err = ZeroDivisionError("test4")
-
- def factory(*args, **kwargs):
- assert len(kwargs) == 0
- assert args[0] == sent_errors
- assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
- assert args[2] == timeout
- return expected_final_err, expected_source_err
-
- generator = retry_target_stream(
- self._generator_mock,
- retry.if_exception_type(ValueError),
- [0] * 3,
- timeout=timeout,
- exception_factory=factory,
- )
- # initialize generator
- next(generator)
- # trigger some retryable errors
- generator.throw(sent_errors[0])
- generator.throw(sent_errors[1])
- # trigger a non-retryable error
- with pytest.raises(expected_final_err.__class__) as exc_info:
- generator.throw(sent_errors[2])
- assert exc_info.value == expected_final_err
- assert exc_info.value.__cause__ == expected_source_err
-
- def test_exc_factory_timeout(self):
- """
- generator should give the option to override exception creation logic
- test when timeout is exceeded
- """
- import time
- from google.api_core.retry import RetryFailureReason
- from google.api_core.retry.retry_streaming import retry_target_stream
-
- timeout = 2
- time_now = time.monotonic()
- now_patcher = mock.patch(
- "time.monotonic",
- return_value=time_now,
- )
-
- with now_patcher as patched_now:
- timeout = 2
- sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
- expected_final_err = RuntimeError("done")
- expected_source_err = ZeroDivisionError("test4")
-
- def factory(*args, **kwargs):
- assert len(kwargs) == 0
- assert args[0] == sent_errors
- assert args[1] == RetryFailureReason.TIMEOUT
- assert args[2] == timeout
- return expected_final_err, expected_source_err
-
- generator = retry_target_stream(
- self._generator_mock,
- retry.if_exception_type(ValueError),
- [0] * 3,
- timeout=timeout,
- exception_factory=factory,
- check_timeout_on_yield=True,
- )
- # initialize generator
- next(generator)
- # trigger some retryable errors
- generator.throw(sent_errors[0])
- generator.throw(sent_errors[1])
- # trigger a timeout
- patched_now.return_value += timeout + 1
- with pytest.raises(expected_final_err.__class__) as exc_info:
- generator.throw(sent_errors[2])
- assert exc_info.value == expected_final_err
- assert exc_info.value.__cause__ == expected_source_err
diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py
deleted file mode 100644
index f5bbcff..0000000
--- a/tests/unit/retry/test_retry_unary.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import pytest
-import re
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-from google.api_core import exceptions
-from google.api_core import retry
-
-from .test_retry_base import Test_BaseRetry
-
-
-@mock.patch("time.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_success(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- call_count = [0]
-
- def target():
- call_count[0] += 1
- if call_count[0] < 3:
- raise ValueError()
- return 42
-
- result = retry.retry_target(target, predicate, range(10), None)
-
- assert result == 42
- assert call_count[0] == 3
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
-@mock.patch("time.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_w_on_error(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- call_count = {"target": 0}
- to_raise = ValueError()
-
- def target():
- call_count["target"] += 1
- if call_count["target"] < 3:
- raise to_raise
- return 42
-
- on_error = mock.Mock()
-
- result = retry.retry_target(target, predicate, range(10), None, on_error=on_error)
-
- assert result == 42
- assert call_count["target"] == 3
-
- on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
-@mock.patch("time.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_non_retryable_error(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- exception = TypeError()
- target = mock.Mock(side_effect=exception)
-
- with pytest.raises(TypeError) as exc_info:
- retry.retry_target(target, predicate, range(10), None)
-
- assert exc_info.value == exception
- sleep.assert_not_called()
-
-
-@mock.patch("asyncio.sleep", autospec=True)
-@mock.patch(
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-@pytest.mark.asyncio
-async def test_retry_target_warning_for_retry(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- target = mock.AsyncMock(spec=["__call__"])
-
- with pytest.warns(Warning) as exc_info:
- # Note: predicate is just a filler and doesn't affect the test
- retry.retry_target(target, predicate, range(10), None)
-
- assert len(exc_info) == 2
- assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING
- sleep.assert_not_called()
-
-
-@mock.patch("time.sleep", autospec=True)
-@mock.patch("time.monotonic", autospec=True)
-@pytest.mark.parametrize("use_deadline_arg", [True, False])
-def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
- predicate = retry.if_exception_type(ValueError)
- exception = ValueError("meep")
- target = mock.Mock(side_effect=exception)
- # Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the timeout.
- monotonic.side_effect = [0, 5, 11]
-
- # support "deadline" as an alias for "timeout"
- kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10}
-
- with pytest.raises(exceptions.RetryError) as exc_info:
- retry.retry_target(target, predicate, range(10), **kwargs)
-
- assert exc_info.value.cause == exception
- assert exc_info.match("Timeout of 10.0s exceeded")
- assert exc_info.match("last exception: meep")
- assert target.call_count == 2
-
- # Ensure the exception message does not include the target fn:
- # it may be a partial with user data embedded
- assert str(target) not in exc_info.exconly()
-
-
-def test_retry_target_bad_sleep_generator():
- with pytest.raises(ValueError, match="Sleep generator"):
- retry.retry_target(mock.sentinel.target, lambda x: True, [], None)
-
-
-@mock.patch("time.sleep", autospec=True)
-def test_retry_target_dynamic_backoff(sleep):
- """
- sleep_generator should be iterated after on_error, to support dynamic backoff
- """
- sleep.side_effect = RuntimeError("stop after sleep")
- # start with empty sleep generator; values are added after exception in push_sleep_value
- sleep_values = []
- exception = ValueError("trigger retry")
- error_target = mock.Mock(side_effect=exception)
- inserted_sleep = 99
-
- def push_sleep_value(err):
- sleep_values.append(inserted_sleep)
-
- with pytest.raises(RuntimeError):
- retry.retry_target(
- error_target,
- predicate=lambda x: True,
- sleep_generator=sleep_values,
- on_error=push_sleep_value,
- )
- assert sleep.call_count == 1
- sleep.assert_called_once_with(inserted_sleep)
-
-
-class TestRetry(Test_BaseRetry):
- def _make_one(self, *args, **kwargs):
- return retry.Retry(*args, **kwargs)
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = retry.Retry(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- timeout=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<Retry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_success(self, sleep):
- retry_ = retry.Retry()
- target = mock.Mock(spec=["__call__"], return_value=42)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target)
- target.assert_not_called()
-
- result = decorated("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_retry(self, sleep, uniform):
- on_error = mock.Mock(spec=["__call__"], side_effect=[None])
- retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
-
- target = mock.Mock(spec=["__call__"], side_effect=[ValueError(), 42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- result = decorated("meep")
-
- assert result == 42
- assert target.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_called_once_with(retry_._initial)
- assert on_error.call_count == 1
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
- on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError),
- initial=1.0,
- maximum=1024.0,
- multiplier=2.0,
- timeout=30.9,
- )
-
- monotonic_patcher = mock.patch("time.monotonic", return_value=0)
-
- target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- with monotonic_patcher as patched_monotonic:
- # Make sure that calls to fake time.sleep() also advance the mocked
- # time clock.
- def increase_time(sleep_delay):
- patched_monotonic.return_value += sleep_delay
-
- sleep.side_effect = increase_time
-
- with pytest.raises(exceptions.RetryError):
- decorated("meep")
-
- assert target.call_count == 5
- target.assert_has_calls([mock.call("meep")] * 5)
- assert on_error.call_count == 5
-
- # check the delays
- assert sleep.call_count == 4 # once between each successive target calls
- last_wait = sleep.call_args.args[0]
- total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
-
- assert last_wait == 8.0
- # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
- # we do not even wait for it to be scheduled (30.9 is configured timeout).
- # This changes the previous logic of shortening the last attempt to fit
- # in the timeout. The previous logic was removed to make Python retry
- # logic consistent with the other languages and to not disrupt the
- # randomized retry delays distribution by artificially increasing a
- # probability of scheduling two (instead of one) last attempts with very
- # short delay between them, while the second retry having very low chance
- # of succeeding anyways.
- assert total_wait == 15.0
-
- @mock.patch("time.sleep", autospec=True)
- def test___init___without_retry_executed(self, sleep):
- _some_function = mock.Mock()
-
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.Mock(spec=["__call__"], side_effect=[42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
-
- result = wrapped("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
- _some_function.assert_not_called()
-
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
- @mock.patch("time.sleep", autospec=True)
- def test___init___when_retry_is_executed(self, sleep, uniform):
- _some_function = mock.Mock()
-
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.Mock(
- spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
- )
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
- target.assert_not_called()
-
- result = wrapped("meep")
-
- assert result == 42
- assert target.call_count == 3
- assert _some_function.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_any_call(retry_._initial)
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
deleted file mode 100644
index 4a8eb74..0000000
--- a/tests/unit/test_bidi.py
+++ /dev/null
@@ -1,965 +0,0 @@
-# Copyright 2018, Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import logging
-import queue
-import threading
-import time
-
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
-except ImportError: # pragma: NO COVER
- import mock # type: ignore
-
-import pytest
-
-try:
- import grpc
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import bidi
-from google.api_core import exceptions
-
-
-class Test_RequestQueueGenerator(object):
- def test_bounded_consume(self):
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = True
-
- def queue_generator(rpc):
- yield mock.sentinel.A
- yield queue.Empty()
- yield mock.sentinel.B
- rpc.is_active.return_value = False
- yield mock.sentinel.C
-
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = queue_generator(call)
-
- generator = bidi._RequestQueueGenerator(q)
- generator.call = call
-
- items = list(generator)
-
- assert items == [mock.sentinel.A, mock.sentinel.B]
-
- def test_yield_initial_and_exit(self):
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = queue.Empty()
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = False
-
- generator = bidi._RequestQueueGenerator(q, initial_request=mock.sentinel.A)
- generator.call = call
-
- items = list(generator)
-
- assert items == [mock.sentinel.A]
-
- def test_yield_initial_callable_and_exit(self):
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = queue.Empty()
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = False
-
- generator = bidi._RequestQueueGenerator(
- q, initial_request=lambda: mock.sentinel.A
- )
- generator.call = call
-
- items = list(generator)
-
- assert items == [mock.sentinel.A]
-
- def test_exit_when_inactive_with_item(self):
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = [mock.sentinel.A, queue.Empty()]
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = False
-
- generator = bidi._RequestQueueGenerator(q)
- generator.call = call
-
- items = list(generator)
-
- assert items == []
- # Make sure it put the item back.
- q.put.assert_called_once_with(mock.sentinel.A)
-
- def test_exit_when_inactive_empty(self):
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = queue.Empty()
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = False
-
- generator = bidi._RequestQueueGenerator(q)
- generator.call = call
-
- items = list(generator)
-
- assert items == []
-
- def test_exit_with_stop(self):
- q = mock.create_autospec(queue.Queue, instance=True)
- q.get.side_effect = [None, queue.Empty()]
- call = mock.create_autospec(grpc.Call, instance=True)
- call.is_active.return_value = True
-
- generator = bidi._RequestQueueGenerator(q)
- generator.call = call
-
- items = list(generator)
-
- assert items == []
-
-
-class Test_Throttle(object):
- def test_repr(self):
- delta = datetime.timedelta(seconds=4.5)
- instance = bidi._Throttle(access_limit=42, time_window=delta)
- assert repr(instance) == "_Throttle(access_limit=42, time_window={})".format(
- repr(delta)
- )
-
- def test_raises_error_on_invalid_init_arguments(self):
- with pytest.raises(ValueError) as exc_info:
- bidi._Throttle(access_limit=10, time_window=datetime.timedelta(seconds=0.0))
- assert "time_window" in str(exc_info.value)
- assert "must be a positive timedelta" in str(exc_info.value)
-
- with pytest.raises(ValueError) as exc_info:
- bidi._Throttle(access_limit=0, time_window=datetime.timedelta(seconds=10))
- assert "access_limit" in str(exc_info.value)
- assert "must be positive" in str(exc_info.value)
-
- def test_does_not_delay_entry_attempts_under_threshold(self):
- throttle = bidi._Throttle(
- access_limit=3, time_window=datetime.timedelta(seconds=1)
- )
- entries = []
-
- for _ in range(3):
- with throttle as time_waited:
- entry_info = {
- "entered_at": datetime.datetime.now(),
- "reported_wait": time_waited,
- }
- entries.append(entry_info)
-
- # check the reported wait times ...
- assert all(entry["reported_wait"] == 0.0 for entry in entries)
-
- # .. and the actual wait times
- delta = entries[1]["entered_at"] - entries[0]["entered_at"]
- assert delta.total_seconds() < 0.1
- delta = entries[2]["entered_at"] - entries[1]["entered_at"]
- assert delta.total_seconds() < 0.1
-
- def test_delays_entry_attempts_above_threshold(self):
- throttle = bidi._Throttle(
- access_limit=3, time_window=datetime.timedelta(seconds=1)
- )
- entries = []
-
- for _ in range(6):
- with throttle as time_waited:
- entry_info = {
- "entered_at": datetime.datetime.now(),
- "reported_wait": time_waited,
- }
- entries.append(entry_info)
-
- # For each group of 4 consecutive entries the time difference between
- # the first and the last entry must have been greater than time_window,
- # because a maximum of 3 are allowed in each time_window.
- for i, entry in enumerate(entries[3:], start=3):
- first_entry = entries[i - 3]
- delta = entry["entered_at"] - first_entry["entered_at"]
- assert delta.total_seconds() > 1.0
-
- # check the reported wait times
- # (NOTE: not using assert all(...), b/c the coverage check would complain)
- for i, entry in enumerate(entries):
- if i != 3:
- assert entry["reported_wait"] == 0.0
-
- # The delayed entry is expected to have been delayed for a significant
- # chunk of the full second, and the actual and reported delay times
- # should reflect that.
- assert entries[3]["reported_wait"] > 0.7
- delta = entries[3]["entered_at"] - entries[2]["entered_at"]
- assert delta.total_seconds() > 0.7
-
-
-class _CallAndFuture(grpc.Call, grpc.Future):
- pass
-
-
-def make_rpc():
- """Makes a mock RPC used to test Bidi classes."""
- call = mock.create_autospec(_CallAndFuture, instance=True)
- rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
-
- def rpc_side_effect(request, metadata=None):
- call.is_active.return_value = True
- call.request = request
- call.metadata = metadata
- return call
-
- rpc.side_effect = rpc_side_effect
-
- def cancel_side_effect():
- call.is_active.return_value = False
-
- call.cancel.side_effect = cancel_side_effect
-
- return rpc, call
-
-
-class ClosedCall(object):
- def __init__(self, exception):
- self.exception = exception
-
- def __next__(self):
- raise self.exception
-
- def is_active(self):
- return False
-
-
-class TestBidiRpc(object):
- def test_initial_state(self):
- bidi_rpc = bidi.BidiRpc(None)
-
- assert bidi_rpc.is_active is False
-
- def test_done_callbacks(self):
- bidi_rpc = bidi.BidiRpc(None)
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._on_call_done(mock.sentinel.future)
-
- callback.assert_called_once_with(mock.sentinel.future)
-
- def test_metadata(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc, metadata=mock.sentinel.A)
- assert bidi_rpc._rpc_metadata == mock.sentinel.A
-
- bidi_rpc.open()
- assert bidi_rpc.call == call
- assert bidi_rpc.call.metadata == mock.sentinel.A
-
- def test_open(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
-
- bidi_rpc.open()
-
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active
- call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done)
-
- def test_open_error_already_open(self):
- rpc, _ = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
-
- bidi_rpc.open()
-
- with pytest.raises(ValueError):
- bidi_rpc.open()
-
- def test_close(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
- bidi_rpc.open()
-
- bidi_rpc.close()
-
- call.cancel.assert_called_once()
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active is False
- # ensure the request queue was signaled to stop.
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is None
- # ensure request and callbacks are cleaned up
- assert bidi_rpc._initial_request is None
- assert not bidi_rpc._callbacks
-
- def test_close_with_no_rpc(self):
- bidi_rpc = bidi.BidiRpc(None)
- bidi_rpc.close()
-
- assert bidi_rpc.call is None
- assert bidi_rpc.is_active is False
- # ensure the request queue was signaled to stop.
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is None
- # ensure request and callbacks are cleaned up
- assert bidi_rpc._initial_request is None
- assert not bidi_rpc._callbacks
-
- def test_send(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
- bidi_rpc.open()
-
- bidi_rpc.send(mock.sentinel.request)
-
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is mock.sentinel.request
-
- def test_send_not_open(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
-
- with pytest.raises(ValueError):
- bidi_rpc.send(mock.sentinel.request)
-
- def test_send_dead_rpc(self):
- error = ValueError()
- bidi_rpc = bidi.BidiRpc(None)
- bidi_rpc.call = ClosedCall(error)
-
- with pytest.raises(ValueError) as exc_info:
- bidi_rpc.send(mock.sentinel.request)
-
- assert exc_info.value == error
-
- def test_recv(self):
- bidi_rpc = bidi.BidiRpc(None)
- bidi_rpc.call = iter([mock.sentinel.response])
-
- response = bidi_rpc.recv()
-
- assert response == mock.sentinel.response
-
- def test_recv_not_open(self):
- rpc, call = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
-
- with pytest.raises(ValueError):
- bidi_rpc.recv()
-
-
-class CallStub(object):
- def __init__(self, values, active=True):
- self.values = iter(values)
- self._is_active = active
- self.cancelled = False
-
- def __next__(self):
- item = next(self.values)
- if isinstance(item, Exception):
- self._is_active = False
- raise item
- return item
-
- def is_active(self):
- return self._is_active
-
- def add_done_callback(self, callback):
- pass
-
- def cancel(self):
- self.cancelled = True
-
-
-class TestResumableBidiRpc(object):
- def test_ctor_defaults(self):
- start_rpc = mock.Mock()
- should_recover = mock.Mock()
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
-
- assert bidi_rpc.is_active is False
- assert bidi_rpc._finalized is False
- assert bidi_rpc._start_rpc is start_rpc
- assert bidi_rpc._should_recover is should_recover
- assert bidi_rpc._should_terminate is bidi._never_terminate
- assert bidi_rpc._initial_request is None
- assert bidi_rpc._rpc_metadata is None
- assert bidi_rpc._reopen_throttle is None
-
- def test_ctor_explicit(self):
- start_rpc = mock.Mock()
- should_recover = mock.Mock()
- should_terminate = mock.Mock()
- initial_request = mock.Mock()
- metadata = {"x-foo": "bar"}
- bidi_rpc = bidi.ResumableBidiRpc(
- start_rpc,
- should_recover,
- should_terminate=should_terminate,
- initial_request=initial_request,
- metadata=metadata,
- throttle_reopen=True,
- )
-
- assert bidi_rpc.is_active is False
- assert bidi_rpc._finalized is False
- assert bidi_rpc._should_recover is should_recover
- assert bidi_rpc._should_terminate is should_terminate
- assert bidi_rpc._initial_request is initial_request
- assert bidi_rpc._rpc_metadata == metadata
- assert isinstance(bidi_rpc._reopen_throttle, bidi._Throttle)
-
- def test_done_callbacks_terminate(self):
- cancellation = mock.Mock()
- start_rpc = mock.Mock()
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- should_terminate = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(
- start_rpc, should_recover, should_terminate=should_terminate
- )
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._on_call_done(cancellation)
-
- should_terminate.assert_called_once_with(cancellation)
- should_recover.assert_not_called()
- callback.assert_called_once_with(cancellation)
- assert not bidi_rpc.is_active
-
- def test_done_callbacks_recoverable(self):
- start_rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._on_call_done(mock.sentinel.future)
-
- callback.assert_not_called()
- start_rpc.assert_called_once()
- should_recover.assert_called_once_with(mock.sentinel.future)
- assert bidi_rpc.is_active
-
- def test_done_callbacks_non_recoverable(self):
- start_rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._on_call_done(mock.sentinel.future)
-
- callback.assert_called_once_with(mock.sentinel.future)
- should_recover.assert_called_once_with(mock.sentinel.future)
- assert not bidi_rpc.is_active
-
- def test_send_terminate(self):
- cancellation = ValueError()
- call_1 = CallStub([cancellation], active=False)
- call_2 = CallStub([])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- should_terminate = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(
- start_rpc, should_recover, should_terminate=should_terminate
- )
-
- bidi_rpc.open()
-
- bidi_rpc.send(mock.sentinel.request)
-
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is None
-
- should_recover.assert_not_called()
- should_terminate.assert_called_once_with(cancellation)
- assert bidi_rpc.call == call_1
- assert bidi_rpc.is_active is False
- assert call_1.cancelled is True
-
- def test_send_recover(self):
- error = ValueError()
- call_1 = CallStub([error], active=False)
- call_2 = CallStub([])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
-
- bidi_rpc.open()
-
- bidi_rpc.send(mock.sentinel.request)
-
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is mock.sentinel.request
-
- should_recover.assert_called_once_with(error)
- assert bidi_rpc.call == call_2
- assert bidi_rpc.is_active is True
-
- def test_send_failure(self):
- error = ValueError()
- call = CallStub([error], active=False)
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, return_value=call
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
-
- bidi_rpc.open()
-
- with pytest.raises(ValueError) as exc_info:
- bidi_rpc.send(mock.sentinel.request)
-
- assert exc_info.value == error
- should_recover.assert_called_once_with(error)
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active is False
- assert call.cancelled is True
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is None
-
- def test_recv_terminate(self):
- cancellation = ValueError()
- call = CallStub([cancellation])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, return_value=call
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- should_terminate = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(
- start_rpc, should_recover, should_terminate=should_terminate
- )
-
- bidi_rpc.open()
-
- bidi_rpc.recv()
-
- should_recover.assert_not_called()
- should_terminate.assert_called_once_with(cancellation)
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active is False
- assert call.cancelled is True
-
- def test_recv_recover(self):
- error = ValueError()
- call_1 = CallStub([1, error])
- call_2 = CallStub([2, 3])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
-
- bidi_rpc.open()
-
- values = []
- for n in range(3):
- values.append(bidi_rpc.recv())
-
- assert values == [1, 2, 3]
- should_recover.assert_called_once_with(error)
- assert bidi_rpc.call == call_2
- assert bidi_rpc.is_active is True
-
- def test_recv_recover_already_recovered(self):
- call_1 = CallStub([])
- call_2 = CallStub([])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
- )
- callback = mock.Mock()
- callback.return_value = True
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, callback)
-
- bidi_rpc.open()
-
- bidi_rpc._reopen()
-
- assert bidi_rpc.call is call_1
- assert bidi_rpc.is_active is True
-
- def test_recv_failure(self):
- error = ValueError()
- call = CallStub([error])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, return_value=call
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
-
- bidi_rpc.open()
-
- with pytest.raises(ValueError) as exc_info:
- bidi_rpc.recv()
-
- assert exc_info.value == error
- should_recover.assert_called_once_with(error)
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active is False
- assert call.cancelled is True
-
- def test_close(self):
- call = mock.create_autospec(_CallAndFuture, instance=True)
-
- def cancel_side_effect():
- call.is_active.return_value = False
-
- call.cancel.side_effect = cancel_side_effect
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, return_value=call
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
- bidi_rpc.open()
-
- bidi_rpc.close()
-
- should_recover.assert_not_called()
- call.cancel.assert_called_once()
- assert bidi_rpc.call == call
- assert bidi_rpc.is_active is False
- # ensure the request queue was signaled to stop.
- assert bidi_rpc.pending_requests == 1
- assert bidi_rpc._request_queue.get() is None
- assert bidi_rpc._finalized
- assert bidi_rpc._initial_request is None
- assert not bidi_rpc._callbacks
-
- def test_reopen_failure_on_rpc_restart(self):
- error1 = ValueError("1")
- error2 = ValueError("2")
- call = CallStub([error1])
- # Invoking start RPC a second time will trigger an error.
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, side_effect=[call, error2]
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- callback = mock.Mock(spec=["__call__"])
-
- bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
- bidi_rpc.add_done_callback(callback)
-
- bidi_rpc.open()
-
- with pytest.raises(ValueError) as exc_info:
- bidi_rpc.recv()
-
- assert exc_info.value == error2
- should_recover.assert_called_once_with(error1)
- assert bidi_rpc.call is None
- assert bidi_rpc.is_active is False
- callback.assert_called_once_with(error2)
-
- def test_using_throttle_on_reopen_requests(self):
- call = CallStub([])
- start_rpc = mock.create_autospec(
- grpc.StreamStreamMultiCallable, instance=True, return_value=call
- )
- should_recover = mock.Mock(spec=["__call__"], return_value=True)
- bidi_rpc = bidi.ResumableBidiRpc(
- start_rpc, should_recover, throttle_reopen=True
- )
-
- patcher = mock.patch.object(bidi_rpc._reopen_throttle.__class__, "__enter__")
- with patcher as mock_enter:
- bidi_rpc._reopen()
-
- mock_enter.assert_called_once()
-
- def test_send_not_open(self):
- bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False)
-
- with pytest.raises(ValueError):
- bidi_rpc.send(mock.sentinel.request)
-
- def test_recv_not_open(self):
- bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False)
-
- with pytest.raises(ValueError):
- bidi_rpc.recv()
-
- def test_finalize_idempotent(self):
- error1 = ValueError("1")
- error2 = ValueError("2")
- callback = mock.Mock(spec=["__call__"])
- should_recover = mock.Mock(spec=["__call__"], return_value=False)
-
- bidi_rpc = bidi.ResumableBidiRpc(mock.sentinel.start_rpc, should_recover)
-
- bidi_rpc.add_done_callback(callback)
-
- bidi_rpc._on_call_done(error1)
- bidi_rpc._on_call_done(error2)
-
- callback.assert_called_once_with(error1)
-
-
-class TestBackgroundConsumer(object):
- def test_consume_once_then_exit(self):
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- bidi_rpc.recv.side_effect = [mock.sentinel.response_1]
- recved = threading.Event()
-
- def on_response(response):
- assert response == mock.sentinel.response_1
- bidi_rpc.is_active = False
- recved.set()
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
-
- recved.wait()
-
- bidi_rpc.recv.assert_called_once()
- assert bidi_rpc.is_active is False
-
- consumer.stop()
-
- bidi_rpc.close.assert_called_once()
- assert consumer.is_active is False
-
- def test_pause_resume_and_close(self):
- # This test is relatively complex. It attempts to start the consumer,
- # consume one item, pause the consumer, check the state of the world,
- # then resume the consumer. Doing this in a deterministic fashion
- # requires a bit more mocking and patching than usual.
-
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
-
- def close_side_effect():
- bidi_rpc.is_active = False
-
- bidi_rpc.close.side_effect = close_side_effect
-
- # These are used to coordinate the two threads to ensure deterministic
- # execution.
- should_continue = threading.Event()
- responses_and_events = {
- mock.sentinel.response_1: threading.Event(),
- mock.sentinel.response_2: threading.Event(),
- }
- bidi_rpc.recv.side_effect = [mock.sentinel.response_1, mock.sentinel.response_2]
-
- recved_responses = []
- consumer = None
-
- def on_response(response):
- if response == mock.sentinel.response_1:
- consumer.pause()
-
- recved_responses.append(response)
- responses_and_events[response].set()
- should_continue.wait()
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
-
- # Wait for the first response to be recved.
- responses_and_events[mock.sentinel.response_1].wait()
-
- # Ensure only one item has been recved and that the consumer is paused.
- assert recved_responses == [mock.sentinel.response_1]
- assert consumer.is_paused is True
- assert consumer.is_active is True
-
- # Unpause the consumer, wait for the second item, then close the
- # consumer.
- should_continue.set()
- consumer.resume()
-
- responses_and_events[mock.sentinel.response_2].wait()
-
- assert recved_responses == [mock.sentinel.response_1, mock.sentinel.response_2]
-
- consumer.stop()
-
- assert consumer.is_active is False
- assert consumer._on_response is None
-
- def test_wake_on_error(self):
- should_continue = threading.Event()
-
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- bidi_rpc.add_done_callback.side_effect = lambda _: should_continue.set()
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, mock.sentinel.on_response)
-
- # Start the consumer paused, which should immediately put it into wait
- # state.
- consumer.pause()
- consumer.start()
-
- # Wait for add_done_callback to be called
- should_continue.wait()
- bidi_rpc.add_done_callback.assert_called_once_with(consumer._on_call_done)
-
- # The consumer should now be blocked on waiting to be unpaused.
- assert consumer.is_active
- assert consumer.is_paused
-
- # Trigger the done callback, it should unpause the consumer and cause
- # it to exit.
- bidi_rpc.is_active = False
- consumer._on_call_done(bidi_rpc)
-
- # It may take a few cycles for the thread to exit.
- while consumer.is_active:
- pass
-
- def test_rpc_callback_fires_when_consumer_start_fails(self):
- expected_exception = exceptions.InvalidArgument(
- "test", response=grpc.StatusCode.INVALID_ARGUMENT
- )
- callback = mock.Mock(spec=["__call__"])
-
- rpc, _ = make_rpc()
- bidi_rpc = bidi.BidiRpc(rpc)
- bidi_rpc.add_done_callback(callback)
- bidi_rpc._start_rpc.side_effect = expected_exception
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response=None)
-
- consumer.start()
-
- # Wait for the consumer's thread to exit.
- while consumer.is_active:
- pass # pragma: NO COVER
-
- assert callback.call_args.args[0] == grpc.StatusCode.INVALID_ARGUMENT
-
- def test_consumer_expected_error(self, caplog):
- caplog.set_level(logging.DEBUG)
-
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable("Gone away")
-
- on_response = mock.Mock(spec=["__call__"])
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
-
- # Wait for the consumer's thread to exit.
- while consumer.is_active:
- pass
-
- on_response.assert_not_called()
- bidi_rpc.recv.assert_called_once()
- assert "caught error" in caplog.text
-
- def test_consumer_unexpected_error(self, caplog):
- caplog.set_level(logging.DEBUG)
-
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- bidi_rpc.recv.side_effect = ValueError()
-
- on_response = mock.Mock(spec=["__call__"])
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
-
- # Wait for the consumer's thread to exit.
- while consumer.is_active:
- pass # pragma: NO COVER (race condition)
-
- on_response.assert_not_called()
- bidi_rpc.recv.assert_called_once()
- assert "caught unexpected exception" in caplog.text
-
- def test_double_stop(self, caplog):
- caplog.set_level(logging.DEBUG)
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- on_response = mock.Mock(spec=["__call__"])
-
- def close_side_effect():
- bidi_rpc.is_active = False
-
- bidi_rpc.close.side_effect = close_side_effect
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
- assert consumer.is_active is True
-
- consumer.stop()
- assert consumer.is_active is False
- assert consumer._on_response is None
-
- # calling stop twice should not result in an error.
- consumer.stop()
-
- def test_stop_error_logs(self, caplog):
- """
- Closing the client should result in no internal error logs
-
- https://github.com/googleapis/python-api-core/issues/788
- """
- caplog.set_level(logging.DEBUG)
- bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
- bidi_rpc.is_active = True
- on_response = mock.Mock(spec=["__call__"])
-
- consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
-
- consumer.start()
- consumer.stop()
- # let the background thread run for a while before exiting
- time.sleep(0.1)
- bidi_rpc.is_active = False
- # running thread should not result in error logs
- error_logs = [r.message for r in caplog.records if r.levelname == "ERROR"]
- assert not error_logs, f"Found unexpected ERROR logs: {error_logs}"
- bidi_rpc.is_active = False
-
- def test_fatal_exceptions_can_inform_consumer(self, caplog):
- """
- https://github.com/googleapis/python-api-core/issues/820
- Exceptions thrown in the BackgroundConsumer not caught by `should_recover` / `should_terminate`
- on the RPC should be bubbled back to the caller through `on_fatal_exception`, if passed.
- """
- caplog.set_level(logging.DEBUG)
-
- for fatal_exception in (
- ValueError("some non-api error"),
- exceptions.PermissionDenied("some api error"),
- ):
- bidi_rpc = mock.create_autospec(bidi.ResumableBidiRpc, instance=True)
- bidi_rpc.is_active = True
- on_response = mock.Mock(spec=["__call__"])
-
- on_fatal_exception = mock.Mock(spec=["__call__"])
-
- bidi_rpc.open.side_effect = fatal_exception
-
- consumer = bidi.BackgroundConsumer(
- bidi_rpc, on_response, on_fatal_exception
- )
-
- consumer.start()
- # let the background thread run for a while before exiting
- time.sleep(0.1)
-
- on_fatal_exception.assert_called_once_with(fatal_exception)
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
deleted file mode 100644
index 3eacabc..0000000
--- a/tests/unit/test_client_info.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-try:
- import grpc
-except ImportError: # pragma: NO COVER
- grpc = None
-
-from google.api_core import client_info
-
-
-def test_constructor_defaults():
- info = client_info.ClientInfo()
-
- assert info.python_version is not None
-
- if grpc is not None: # pragma: NO COVER
- assert info.grpc_version is not None
- else: # pragma: NO COVER
- assert info.grpc_version is None
-
- assert info.api_core_version is not None
- assert info.gapic_version is None
- assert info.client_library_version is None
- assert info.rest_version is None
-
-
-def test_constructor_options():
- info = client_info.ClientInfo(
- python_version="1",
- grpc_version="2",
- api_core_version="3",
- gapic_version="4",
- client_library_version="5",
- user_agent="6",
- rest_version="7",
- protobuf_runtime_version="8",
- )
-
- assert info.python_version == "1"
- assert info.grpc_version == "2"
- assert info.api_core_version == "3"
- assert info.gapic_version == "4"
- assert info.client_library_version == "5"
- assert info.user_agent == "6"
- assert info.rest_version == "7"
- assert info.protobuf_runtime_version == "8"
-
-
-def test_to_user_agent_minimal():
- info = client_info.ClientInfo(
- python_version="1",
- api_core_version="2",
- grpc_version=None,
- protobuf_runtime_version=None,
- )
-
- user_agent = info.to_user_agent()
-
- assert user_agent == "gl-python/1 gax/2"
-
-
-def test_to_user_agent_full():
- info = client_info.ClientInfo(
- python_version="1",
- grpc_version="2",
- api_core_version="3",
- gapic_version="4",
- client_library_version="5",
- user_agent="app-name/1.0",
- protobuf_runtime_version="6",
- )
-
- user_agent = info.to_user_agent()
-
- assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5 pb/6"
-
-
-def test_to_user_agent_rest():
- info = client_info.ClientInfo(
- python_version="1",
- grpc_version=None,
- rest_version="2",
- api_core_version="3",
- gapic_version="4",
- client_library_version="5",
- user_agent="app-name/1.0",
- )
-
- user_agent = info.to_user_agent()
-
- assert user_agent == "app-name/1.0 gl-python/1 rest/2 gax/3 gapic/4 gccl/5"
diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py
deleted file mode 100644
index c73b269..0000000
--- a/tests/unit/test_client_logging.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import json
-import logging
-from unittest import mock
-
-from google.api_core.client_logging import (
- setup_logging,
- initialize_logging,
- StructuredLogFormatter,
-)
-
-
-def reset_logger(scope):
- logger = logging.getLogger(scope)
- logger.handlers = []
- logger.setLevel(logging.NOTSET)
- logger.propagate = True
-
-
-def test_setup_logging_w_no_scopes():
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- setup_logging()
- base_logger = logging.getLogger("foogle")
- assert base_logger.handlers == []
- assert not base_logger.propagate
- assert base_logger.level == logging.NOTSET
-
- reset_logger("foogle")
-
-
-def test_setup_logging_w_base_scope():
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- setup_logging("foogle")
- base_logger = logging.getLogger("foogle")
- assert isinstance(base_logger.handlers[0], logging.StreamHandler)
- assert not base_logger.propagate
- assert base_logger.level == logging.DEBUG
-
- reset_logger("foogle")
-
-
-def test_setup_logging_w_configured_scope():
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- base_logger = logging.getLogger("foogle")
- base_logger.propagate = False
- setup_logging("foogle")
- assert base_logger.handlers == []
- assert not base_logger.propagate
- assert base_logger.level == logging.NOTSET
-
- reset_logger("foogle")
-
-
-def test_setup_logging_w_module_scope():
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- setup_logging("foogle.bar")
-
- base_logger = logging.getLogger("foogle")
- assert base_logger.handlers == []
- assert not base_logger.propagate
- assert base_logger.level == logging.NOTSET
-
- module_logger = logging.getLogger("foogle.bar")
- assert isinstance(module_logger.handlers[0], logging.StreamHandler)
- assert not module_logger.propagate
- assert module_logger.level == logging.DEBUG
-
- reset_logger("foogle")
- reset_logger("foogle.bar")
-
-
-def test_setup_logging_w_incorrect_scope():
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- setup_logging("abc")
-
- base_logger = logging.getLogger("foogle")
- assert base_logger.handlers == []
- assert not base_logger.propagate
- assert base_logger.level == logging.NOTSET
-
- # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope.
- logger = logging.getLogger("abc")
- assert isinstance(logger.handlers[0], logging.StreamHandler)
- assert not logger.propagate
- assert logger.level == logging.DEBUG
-
- reset_logger("foogle")
- reset_logger("abc")
-
-
-def test_initialize_logging():
- with mock.patch("os.getenv", return_value="foogle.bar"):
- with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
- initialize_logging()
-
- base_logger = logging.getLogger("foogle")
- assert base_logger.handlers == []
- assert not base_logger.propagate
- assert base_logger.level == logging.NOTSET
-
- module_logger = logging.getLogger("foogle.bar")
- assert isinstance(module_logger.handlers[0], logging.StreamHandler)
- assert not module_logger.propagate
- assert module_logger.level == logging.DEBUG
-
- # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified:
- base_logger.propagate = True
- module_logger.propagate = True
-
- initialize_logging()
-
- assert base_logger.propagate
- assert module_logger.propagate
-
- reset_logger("foogle")
- reset_logger("foogle.bar")
-
-
-def test_structured_log_formatter():
- # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented.
- record = logging.LogRecord(
- name="Appelation",
- level=logging.DEBUG,
- msg="This is a test message.",
- pathname="some/path",
- lineno=25,
- args=None,
- exc_info=None,
- )
-
- # Extra fields:
- record.rpcName = "bar"
-
- formatted_msg = StructuredLogFormatter().format(record)
- parsed_msg = json.loads(formatted_msg)
-
- assert parsed_msg["name"] == "Appelation"
- assert parsed_msg["severity"] == "DEBUG"
- assert parsed_msg["message"] == "This is a test message."
- assert parsed_msg["rpcName"] == "bar"
diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py
deleted file mode 100644
index 54558ee..0000000
--- a/tests/unit/test_client_options.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from re import match
-import pytest
-from ..helpers import warn_deprecated_credentials_file
-
-from google.api_core import client_options
-
-
-def get_client_cert():
- return b"cert", b"key"
-
-
-def get_client_encrypted_cert():
- return "cert_path", "key_path", b"passphrase"
-
-
-def test_constructor():
- with warn_deprecated_credentials_file():
- options = client_options.ClientOptions(
- api_endpoint="foo.googleapis.com",
- client_cert_source=get_client_cert,
- quota_project_id="quote-proj",
- credentials_file="path/to/credentials.json",
- scopes=[
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ],
- api_audience="foo2.googleapis.com",
- universe_domain="googleapis.com",
- )
-
- assert options.api_endpoint == "foo.googleapis.com"
- assert options.client_cert_source() == (b"cert", b"key")
- assert options.quota_project_id == "quote-proj"
- assert options.credentials_file == "path/to/credentials.json"
- assert options.scopes == [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ]
- assert options.api_audience == "foo2.googleapis.com"
- assert options.universe_domain == "googleapis.com"
-
-
-def test_constructor_with_encrypted_cert_source():
- options = client_options.ClientOptions(
- api_endpoint="foo.googleapis.com",
- client_encrypted_cert_source=get_client_encrypted_cert,
- )
-
- assert options.api_endpoint == "foo.googleapis.com"
- assert options.client_encrypted_cert_source() == (
- "cert_path",
- "key_path",
- b"passphrase",
- )
-
-
-def test_constructor_with_both_cert_sources():
- with pytest.raises(ValueError):
- client_options.ClientOptions(
- api_endpoint="foo.googleapis.com",
- client_cert_source=get_client_cert,
- client_encrypted_cert_source=get_client_encrypted_cert,
- )
-
-
-def test_constructor_with_api_key():
- options = client_options.ClientOptions(
- api_endpoint="foo.googleapis.com",
- client_cert_source=get_client_cert,
- quota_project_id="quote-proj",
- api_key="api-key",
- scopes=[
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ],
- )
-
- assert options.api_endpoint == "foo.googleapis.com"
- assert options.client_cert_source() == (b"cert", b"key")
- assert options.quota_project_id == "quote-proj"
- assert options.api_key == "api-key"
- assert options.scopes == [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ]
-
-
-def test_constructor_with_both_api_key_and_credentials_file():
- with pytest.raises(ValueError):
- with warn_deprecated_credentials_file():
- client_options.ClientOptions(
- api_key="api-key",
- credentials_file="path/to/credentials.json",
- )
-
-
-def test_from_dict():
- options = client_options.from_dict(
- {
- "api_endpoint": "foo.googleapis.com",
- "universe_domain": "googleapis.com",
- "client_cert_source": get_client_cert,
- "quota_project_id": "quote-proj",
- "credentials_file": "path/to/credentials.json",
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ],
- "api_audience": "foo2.googleapis.com",
- }
- )
-
- assert options.api_endpoint == "foo.googleapis.com"
- assert options.universe_domain == "googleapis.com"
- assert options.client_cert_source() == (b"cert", b"key")
- assert options.quota_project_id == "quote-proj"
- assert options.credentials_file == "path/to/credentials.json"
- assert options.scopes == [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/cloud-platform.read-only",
- ]
- assert options.api_key is None
- assert options.api_audience == "foo2.googleapis.com"
-
-
-def test_from_dict_bad_argument():
- with pytest.raises(ValueError):
- client_options.from_dict(
- {
- "api_endpoint": "foo.googleapis.com",
- "bad_arg": "1234",
- "client_cert_source": get_client_cert,
- }
- )
-
-
-def test_repr():
- expected_keys = set(
- [
- "api_endpoint",
- "universe_domain",
- "client_cert_source",
- "client_encrypted_cert_source",
- "quota_project_id",
- "credentials_file",
- "scopes",
- "api_key",
- "api_audience",
- ]
- )
- options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
- options_repr = repr(options)
- options_keys = vars(options).keys()
- assert match(r"ClientOptions:", options_repr)
- assert match(r".*'api_endpoint': 'foo.googleapis.com'.*", options_repr)
- assert options_keys == expected_keys
diff --git a/tests/unit/test_datetime_helpers.py b/tests/unit/test_datetime_helpers.py
deleted file mode 100644
index 5f5470a..0000000
--- a/tests/unit/test_datetime_helpers.py
+++ /dev/null
@@ -1,396 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import calendar
-import datetime
-
-import pytest
-
-from google.api_core import datetime_helpers
-from google.protobuf import timestamp_pb2
-
-
-ONE_MINUTE_IN_MICROSECONDS = 60 * 1e6
-
-
-def test_utcnow():
- result = datetime_helpers.utcnow()
- assert isinstance(result, datetime.datetime)
-
-
-def test_to_milliseconds():
- dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=datetime.timezone.utc)
- assert datetime_helpers.to_milliseconds(dt) == 1000
-
-
-def test_to_microseconds():
- microseconds = 314159
- dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds)
- assert datetime_helpers.to_microseconds(dt) == microseconds
-
-
-def test_to_microseconds_non_utc():
- zone = datetime.timezone(datetime.timedelta(minutes=-1))
- dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=zone)
- assert datetime_helpers.to_microseconds(dt) == ONE_MINUTE_IN_MICROSECONDS
-
-
-def test_to_microseconds_naive():
- microseconds = 314159
- dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds, tzinfo=None)
- assert datetime_helpers.to_microseconds(dt) == microseconds
-
-
-def test_from_microseconds():
- five_mins_from_epoch_in_microseconds = 5 * ONE_MINUTE_IN_MICROSECONDS
- five_mins_from_epoch_datetime = datetime.datetime(
- 1970, 1, 1, 0, 5, 0, tzinfo=datetime.timezone.utc
- )
-
- result = datetime_helpers.from_microseconds(five_mins_from_epoch_in_microseconds)
-
- assert result == five_mins_from_epoch_datetime
-
-
-def test_from_iso8601_date():
- today = datetime.date.today()
- iso_8601_today = today.strftime("%Y-%m-%d")
-
- assert datetime_helpers.from_iso8601_date(iso_8601_today) == today
-
-
-def test_from_iso8601_time():
- assert datetime_helpers.from_iso8601_time("12:09:42") == datetime.time(12, 9, 42)
-
-
-def test_from_rfc3339():
- value = "2009-12-17T12:44:32.123456Z"
- assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
- )
-
-
-def test_from_rfc3339_nanos():
- value = "2009-12-17T12:44:32.123456Z"
- assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
- )
-
-
-def test_from_rfc3339_without_nanos():
- value = "2009-12-17T12:44:32Z"
- assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
- )
-
-
-def test_from_rfc3339_nanos_without_nanos():
- value = "2009-12-17T12:44:32Z"
- assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
- )
-
-
-@pytest.mark.parametrize(
- "truncated, micros",
- [
- ("12345678", 123456),
- ("1234567", 123456),
- ("123456", 123456),
- ("12345", 123450),
- ("1234", 123400),
- ("123", 123000),
- ("12", 120000),
- ("1", 100000),
- ],
-)
-def test_from_rfc3339_with_truncated_nanos(truncated, micros):
- value = "2009-12-17T12:44:32.{}Z".format(truncated)
- assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
- )
-
-
-def test_from_rfc3339_nanos_is_deprecated():
- value = "2009-12-17T12:44:32.123456Z"
-
- result = datetime_helpers.from_rfc3339(value)
- result_nanos = datetime_helpers.from_rfc3339_nanos(value)
-
- assert result == result_nanos
-
-
-@pytest.mark.parametrize(
- "truncated, micros",
- [
- ("12345678", 123456),
- ("1234567", 123456),
- ("123456", 123456),
- ("12345", 123450),
- ("1234", 123400),
- ("123", 123000),
- ("12", 120000),
- ("1", 100000),
- ],
-)
-def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros):
- value = "2009-12-17T12:44:32.{}Z".format(truncated)
- assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
- )
-
-
-def test_from_rfc3339_wo_nanos_raise_exception():
- value = "2009-12-17T12:44:32"
- with pytest.raises(ValueError):
- datetime_helpers.from_rfc3339(value)
-
-
-def test_from_rfc3339_w_nanos_raise_exception():
- value = "2009-12-17T12:44:32.123456"
- with pytest.raises(ValueError):
- datetime_helpers.from_rfc3339(value)
-
-
-def test_to_rfc3339():
- value = datetime.datetime(2016, 4, 5, 13, 30, 0)
- expected = "2016-04-05T13:30:00.000000Z"
- assert datetime_helpers.to_rfc3339(value) == expected
-
-
-def test_to_rfc3339_with_utc():
- value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=datetime.timezone.utc)
- expected = "2016-04-05T13:30:00.000000Z"
- assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
-
-
-def test_to_rfc3339_with_non_utc():
- zone = datetime.timezone(datetime.timedelta(minutes=-60))
- value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
- expected = "2016-04-05T14:30:00.000000Z"
- assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
-
-
-def test_to_rfc3339_with_non_utc_ignore_zone():
- zone = datetime.timezone(datetime.timedelta(minutes=-60))
- value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
- expected = "2016-04-05T13:30:00.000000Z"
- assert datetime_helpers.to_rfc3339(value, ignore_zone=True) == expected
-
-
-class Test_DateTimeWithNanos(object):
- @staticmethod
- def test_ctor_wo_nanos():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, 123456
- )
- assert stamp.year == 2016
- assert stamp.month == 12
- assert stamp.day == 20
- assert stamp.hour == 21
- assert stamp.minute == 13
- assert stamp.second == 47
- assert stamp.microsecond == 123456
- assert stamp.nanosecond == 0
-
- @staticmethod
- def test_ctor_w_nanos():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789
- )
- assert stamp.year == 2016
- assert stamp.month == 12
- assert stamp.day == 20
- assert stamp.hour == 21
- assert stamp.minute == 13
- assert stamp.second == 47
- assert stamp.microsecond == 123456
- assert stamp.nanosecond == 123456789
-
- @staticmethod
- def test_ctor_w_micros_positional_and_nanos():
- with pytest.raises(TypeError):
- datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, 123456, nanosecond=123456789
- )
-
- @staticmethod
- def test_ctor_w_micros_keyword_and_nanos():
- with pytest.raises(TypeError):
- datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, microsecond=123456, nanosecond=123456789
- )
-
- @staticmethod
- def test_rfc3339_wo_nanos():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, 123456
- )
- assert stamp.rfc3339() == "2016-12-20T21:13:47.123456Z"
-
- @staticmethod
- def test_rfc3339_wo_nanos_w_leading_zero():
- stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 1234)
- assert stamp.rfc3339() == "2016-12-20T21:13:47.001234Z"
-
- @staticmethod
- def test_rfc3339_w_nanos():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789
- )
- assert stamp.rfc3339() == "2016-12-20T21:13:47.123456789Z"
-
- @staticmethod
- def test_rfc3339_w_nanos_w_leading_zero():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=1234567
- )
- assert stamp.rfc3339() == "2016-12-20T21:13:47.001234567Z"
-
- @staticmethod
- def test_rfc3339_w_nanos_no_trailing_zeroes():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=100000000
- )
- assert stamp.rfc3339() == "2016-12-20T21:13:47.1Z"
-
- @staticmethod
- def test_rfc3339_w_nanos_w_leading_zero_and_no_trailing_zeros():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=1234500
- )
- assert stamp.rfc3339() == "2016-12-20T21:13:47.0012345Z"
-
- @staticmethod
- def test_from_rfc3339_w_invalid():
- stamp = "2016-12-20T21:13:47"
- with pytest.raises(ValueError):
- datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(stamp)
-
- @staticmethod
- def test_from_rfc3339_wo_fraction():
- timestamp = "2016-12-20T21:13:47Z"
- expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, tzinfo=datetime.timezone.utc
- )
- stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
- assert stamp == expected
-
- @staticmethod
- def test_from_rfc3339_w_partial_precision():
- timestamp = "2016-12-20T21:13:47.1Z"
- expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=datetime.timezone.utc
- )
- stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
- assert stamp == expected
-
- @staticmethod
- def test_from_rfc3339_w_full_precision():
- timestamp = "2016-12-20T21:13:47.123456789Z"
- expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
- )
- stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
- assert stamp == expected
-
- @staticmethod
- @pytest.mark.parametrize(
- "fractional, nanos",
- [
- ("12345678", 123456780),
- ("1234567", 123456700),
- ("123456", 123456000),
- ("12345", 123450000),
- ("1234", 123400000),
- ("123", 123000000),
- ("12", 120000000),
- ("1", 100000000),
- ],
- )
- def test_from_rfc3339_test_nanoseconds(fractional, nanos):
- value = "2009-12-17T12:44:32.{}Z".format(fractional)
- assert (
- datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(value).nanosecond
- == nanos
- )
-
- @staticmethod
- def test_timestamp_pb_wo_nanos_naive():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, 123456
- )
- delta = (
- stamp.replace(tzinfo=datetime.timezone.utc) - datetime_helpers._UTC_EPOCH
- )
- seconds = int(delta.total_seconds())
- nanos = 123456000
- timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
- assert stamp.timestamp_pb() == timestamp
-
- @staticmethod
- def test_timestamp_pb_w_nanos():
- stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
- )
- delta = stamp - datetime_helpers._UTC_EPOCH
- timestamp = timestamp_pb2.Timestamp(
- seconds=int(delta.total_seconds()), nanos=123456789
- )
- assert stamp.timestamp_pb() == timestamp
-
- @staticmethod
- def test_from_timestamp_pb_wo_nanos():
- when = datetime.datetime(
- 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
- )
- delta = when - datetime_helpers._UTC_EPOCH
- seconds = int(delta.total_seconds())
- timestamp = timestamp_pb2.Timestamp(seconds=seconds)
-
- stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp)
-
- assert _to_seconds(when) == _to_seconds(stamp)
- assert stamp.microsecond == 0
- assert stamp.nanosecond == 0
- assert stamp.tzinfo == datetime.timezone.utc
-
- @staticmethod
- def test_from_timestamp_pb_w_nanos():
- when = datetime.datetime(
- 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
- )
- delta = when - datetime_helpers._UTC_EPOCH
- seconds = int(delta.total_seconds())
- timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=123456789)
-
- stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp)
-
- assert _to_seconds(when) == _to_seconds(stamp)
- assert stamp.microsecond == 123456
- assert stamp.nanosecond == 123456789
- assert stamp.tzinfo == datetime.timezone.utc
-
-
-def _to_seconds(value):
- """Convert a datetime to seconds since the unix epoch.
-
- Args:
- value (datetime.datetime): The datetime to covert.
-
- Returns:
- int: Microseconds since the unix epoch.
- """
- assert value.tzinfo is datetime.timezone.utc
- return calendar.timegm(value.timetuple())
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
deleted file mode 100644
index e3f8f90..0000000
--- a/tests/unit/test_exceptions.py
+++ /dev/null
@@ -1,395 +0,0 @@
-# Copyright 2014 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import http.client
-import json
-from unittest import mock
-
-import pytest
-import requests
-
-try:
- import grpc
- from grpc_status import rpc_status
-except ImportError: # pragma: NO COVER
- grpc = rpc_status = None
-
-from google.api_core import exceptions
-from google.protobuf import any_pb2, json_format
-from google.rpc import error_details_pb2, status_pb2
-
-
-def test_create_google_cloud_error():
- exception = exceptions.GoogleAPICallError("Testing")
- exception.code = 600
- assert str(exception) == "600 Testing"
- assert exception.message == "Testing"
- assert exception.errors == []
- assert exception.response is None
-
-
-def test_create_google_cloud_error_with_args():
- error = {
- "code": 600,
- "message": "Testing",
- }
- response = mock.sentinel.response
- exception = exceptions.GoogleAPICallError("Testing", [error], response=response)
- exception.code = 600
- assert str(exception) == "600 Testing"
- assert exception.message == "Testing"
- assert exception.errors == [error]
- assert exception.response == response
-
-
-def test_from_http_status():
- message = "message"
- exception = exceptions.from_http_status(http.client.NOT_FOUND, message)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == message
- assert exception.errors == []
-
-
-def test_from_http_status_with_errors_and_response():
- message = "message"
- errors = ["1", "2"]
- response = mock.sentinel.response
- exception = exceptions.from_http_status(
- http.client.NOT_FOUND, message, errors=errors, response=response
- )
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == message
- assert exception.errors == errors
- assert exception.response == response
-
-
-def test_from_http_status_unknown_code():
- message = "message"
- status_code = 156
- exception = exceptions.from_http_status(status_code, message)
- assert exception.code == status_code
- assert exception.message == message
-
-
-def make_response(content):
- response = requests.Response()
- response._content = content
- response.status_code = http.client.NOT_FOUND
- response.request = requests.Request(
- method="POST", url="https://example.com"
- ).prepare()
- return response
-
-
-def test_from_http_response_no_content():
- response = make_response(None)
-
- exception = exceptions.from_http_response(response)
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == "POST https://example.com/: unknown error"
- assert exception.response == response
-
-
-def test_from_http_response_text_content():
- response = make_response(b"message")
- response.encoding = "UTF8" # suppress charset_normalizer warning
-
- exception = exceptions.from_http_response(response)
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == "POST https://example.com/: message"
-
-
-def test_from_http_response_json_content():
- response = make_response(
- json.dumps({"error": {"message": "json message", "errors": ["1", "2"]}}).encode(
- "utf-8"
- )
- )
-
- exception = exceptions.from_http_response(response)
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == "POST https://example.com/: json message"
- assert exception.errors == ["1", "2"]
-
-
-def test_from_http_response_bad_json_content():
- response = make_response(json.dumps({"meep": "moop"}).encode("utf-8"))
-
- exception = exceptions.from_http_response(response)
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == "POST https://example.com/: unknown error"
-
-
-def test_from_http_response_json_unicode_content():
- response = make_response(
- json.dumps(
- {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
- ).encode("utf-8")
- )
-
- exception = exceptions.from_http_response(response)
-
- assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http.client.NOT_FOUND
- assert exception.message == "POST https://example.com/: \u2019 message"
- assert exception.errors == ["1", "2"]
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_status():
- message = "message"
- exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message)
- assert isinstance(exception, exceptions.BadRequest)
- assert isinstance(exception, exceptions.OutOfRange)
- assert exception.code == http.client.BAD_REQUEST
- assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
- assert exception.message == message
- assert exception.errors == []
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_status_as_int():
- message = "message"
- exception = exceptions.from_grpc_status(11, message)
- assert isinstance(exception, exceptions.BadRequest)
- assert isinstance(exception, exceptions.OutOfRange)
- assert exception.code == http.client.BAD_REQUEST
- assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
- assert exception.message == message
- assert exception.errors == []
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_status_with_errors_and_response():
- message = "message"
- response = mock.sentinel.response
- errors = ["1", "2"]
- exception = exceptions.from_grpc_status(
- grpc.StatusCode.OUT_OF_RANGE, message, errors=errors, response=response
- )
-
- assert isinstance(exception, exceptions.OutOfRange)
- assert exception.message == message
- assert exception.errors == errors
- assert exception.response == response
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_status_unknown_code():
- message = "message"
- exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message)
- assert exception.grpc_status_code == grpc.StatusCode.OK
- assert exception.message == message
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_error():
- message = "message"
- error = mock.create_autospec(grpc.Call, instance=True)
- error.code.return_value = grpc.StatusCode.INVALID_ARGUMENT
- error.details.return_value = message
-
- exception = exceptions.from_grpc_error(error)
-
- assert isinstance(exception, exceptions.BadRequest)
- assert isinstance(exception, exceptions.InvalidArgument)
- assert exception.code == http.client.BAD_REQUEST
- assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT
- assert exception.message == message
- assert exception.errors == [error]
- assert exception.response == error
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_error_non_call():
- message = "message"
- error = mock.create_autospec(grpc.RpcError, instance=True)
- error.__str__.return_value = message
-
- exception = exceptions.from_grpc_error(error)
-
- assert isinstance(exception, exceptions.GoogleAPICallError)
- assert exception.code is None
- assert exception.grpc_status_code is None
- assert exception.message == message
- assert exception.errors == [error]
- assert exception.response == error
-
-
-@pytest.mark.skipif(grpc is None, reason="No grpc")
-def test_from_grpc_error_bare_call():
- message = "Testing"
-
- class TestingError(grpc.Call, grpc.RpcError):
- def __init__(self, exception):
- self.exception = exception
-
- def code(self):
- return self.exception.grpc_status_code
-
- def details(self):
- return message
-
- nested_message = "message"
- error = TestingError(exceptions.GoogleAPICallError(nested_message))
-
- exception = exceptions.from_grpc_error(error)
-
- assert isinstance(exception, exceptions.GoogleAPICallError)
- assert exception.code is None
- assert exception.grpc_status_code is None
- assert exception.message == message
- assert exception.errors == [error]
- assert exception.response == error
- assert exception.details == []
-
-
-def create_bad_request_details():
- bad_request_details = error_details_pb2.BadRequest()
- field_violation = bad_request_details.field_violations.add()
- field_violation.field = "document.content"
- field_violation.description = "Must have some text content to annotate."
- status_detail = any_pb2.Any()
- status_detail.Pack(bad_request_details)
- return status_detail
-
-
-def create_error_info_details():
- info = error_details_pb2.ErrorInfo(
- reason="SERVICE_DISABLED",
- domain="googleapis.com",
- metadata={
- "consumer": "projects/455411330361",
- "service": "translate.googleapis.com",
- },
- )
- status_detail = any_pb2.Any()
- status_detail.Pack(info)
- return status_detail
-
-
-def test_error_details_from_rest_response():
- bad_request_detail = create_bad_request_details()
- error_info_detail = create_error_info_details()
- status = status_pb2.Status()
- status.code = 3
- status.message = (
- "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
- )
- status.details.append(bad_request_detail)
- status.details.append(error_info_detail)
-
- # See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping
- http_response = make_response(
- json.dumps(
- {"error": json.loads(json_format.MessageToJson(status, sort_keys=True))}
- ).encode("utf-8")
- )
- exception = exceptions.from_http_response(http_response)
- want_error_details = [
- json.loads(json_format.MessageToJson(bad_request_detail)),
- json.loads(json_format.MessageToJson(error_info_detail)),
- ]
- assert want_error_details == exception.details
-
- # 404 POST comes from make_response.
- assert str(exception) == (
- "404 POST https://example.com/: 3 INVALID_ARGUMENT:"
- " One of content, or gcs_content_uri must be set."
- " [{'@type': 'type.googleapis.com/google.rpc.BadRequest',"
- " 'fieldViolations': [{'description': 'Must have some text content to annotate.',"
- " 'field': 'document.content'}]},"
- " {'@type': 'type.googleapis.com/google.rpc.ErrorInfo',"
- " 'domain': 'googleapis.com',"
- " 'metadata': {'consumer': 'projects/455411330361',"
- " 'service': 'translate.googleapis.com'},"
- " 'reason': 'SERVICE_DISABLED'}]"
- )
-
-
-def test_error_details_from_v1_rest_response():
- response = make_response(
- json.dumps(
- {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
- ).encode("utf-8")
- )
- exception = exceptions.from_http_response(response)
- assert exception.details == []
- assert (
- exception.reason is None
- and exception.domain is None
- and exception.metadata is None
- )
-
-
-@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
-def test_error_details_from_grpc_response():
- status = rpc_status.status_pb2.Status()
- status.code = 3
- status.message = (
- "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
- )
- status_br_detail = create_bad_request_details()
- status_ei_detail = create_error_info_details()
- status.details.append(status_br_detail)
- status.details.append(status_ei_detail)
-
- # The actual error doesn't matter as long as its grpc.Call,
- # because from_call is mocked.
- error = mock.create_autospec(grpc.Call, instance=True)
- with mock.patch("grpc_status.rpc_status.from_call") as m:
- m.return_value = status
- exception = exceptions.from_grpc_error(error)
-
- bad_request_detail = error_details_pb2.BadRequest()
- error_info_detail = error_details_pb2.ErrorInfo()
- status_br_detail.Unpack(bad_request_detail)
- status_ei_detail.Unpack(error_info_detail)
- assert exception.details == [bad_request_detail, error_info_detail]
- assert exception.reason == error_info_detail.reason
- assert exception.domain == error_info_detail.domain
- assert exception.metadata == error_info_detail.metadata
-
-
-@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
-def test_error_details_from_grpc_response_unknown_error():
- status_detail = any_pb2.Any()
-
- status = rpc_status.status_pb2.Status()
- status.code = 3
- status.message = (
- "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
- )
- status.details.append(status_detail)
-
- error = mock.create_autospec(grpc.Call, instance=True)
- with mock.patch("grpc_status.rpc_status.from_call") as m:
- m.return_value = status
- exception = exceptions.from_grpc_error(error)
- assert exception.details == [status_detail]
- assert (
- exception.reason is None
- and exception.domain is None
- and exception.metadata is None
- )
diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py
deleted file mode 100644
index ab55066..0000000
--- a/tests/unit/test_extended_operation.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import dataclasses
-import enum
-import typing
-from unittest import mock
-
-import pytest
-
-from google.api_core import exceptions
-from google.api_core import extended_operation
-from google.api_core import retry
-
-TEST_OPERATION_NAME = "test/extended_operation"
-
-
-@dataclasses.dataclass(frozen=True)
-class CustomOperation:
- class StatusCode(enum.Enum):
- UNKNOWN = 0
- DONE = 1
- PENDING = 2
-
- class LROCustomErrors:
- class LROCustomError:
- def __init__(self, code: str = "", message: str = ""):
- self.code = code
- self.message = message
-
- def __init__(self, errors: typing.List[LROCustomError] = []):
- self.errors = errors
-
- name: str
- status: StatusCode
- error_code: typing.Optional[int] = None
- error_message: typing.Optional[str] = None
- armor_class: typing.Optional[int] = None
- # Note: `error` can be removed once proposal A from
- # b/284179390 is implemented.
- error: typing.Optional[LROCustomErrors] = None
-
- # Note: in generated clients, this property must be generated for each
- # extended operation message type.
- # The status may be an enum, a string, or a bool. If it's a string or enum,
- # its text is compared to the string "DONE".
- @property
- def done(self):
- return self.status.name == "DONE"
-
-
-def make_extended_operation(responses=None):
- client_operations_responses = responses or [
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
- )
- ]
-
- refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
- refresh.responses = client_operations_responses
- cancel = mock.Mock(spec=["__call__"])
- extended_operation_future = extended_operation.ExtendedOperation.make(
- refresh,
- cancel,
- client_operations_responses[0],
- )
-
- return extended_operation_future, refresh, cancel
-
-
-def test_constructor():
- ex_op, refresh, _ = make_extended_operation()
- assert ex_op._extended_operation == refresh.responses[0]
- assert not ex_op.cancelled()
- assert not ex_op.done()
- assert ex_op.name == TEST_OPERATION_NAME
- assert ex_op.status == CustomOperation.StatusCode.PENDING
- assert ex_op.error_code is None
- assert ex_op.error_message is None
-
-
-def test_done():
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
- ),
- # Second response indicates that the operation has finished.
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
- ),
- # Bumper to make sure we stop polling on DONE.
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.DONE,
- error_message="Gone too far!",
- ),
- ]
- ex_op, refresh, _ = make_extended_operation(responses)
-
- # Start out not done.
- assert not ex_op.done()
- assert refresh.call_count == 1
-
- # Refresh brings us to the done state.
- assert ex_op.done()
- assert refresh.call_count == 2
- assert not ex_op.error_message
-
- # Make sure that subsequent checks are no-ops.
- assert ex_op.done()
- assert refresh.call_count == 2
- assert not ex_op.error_message
-
-
-def test_cancellation():
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
- ),
- # Second response indicates that the operation was cancelled.
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
- ),
- ]
- ex_op, _, cancel = make_extended_operation(responses)
-
- assert not ex_op.cancelled()
-
- assert ex_op.cancel()
- assert ex_op.cancelled()
- cancel.assert_called_once_with()
-
- # Cancelling twice should have no effect.
- assert not ex_op.cancel()
- cancel.assert_called_once_with()
-
-
-def test_done_w_retry():
- # Not sure what's going on here with the coverage, so just ignore it.
- test_retry = retry.Retry(predicate=lambda x: True) # pragma: NO COVER
-
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
- ),
- CustomOperation(
- name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
- ),
- ]
-
- ex_op, refresh, _ = make_extended_operation(responses)
-
- ex_op.done(retry=test_retry)
-
- refresh.assert_called_once_with(retry=test_retry)
-
-
-def test_error():
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.DONE,
- error_code=400,
- error_message="Bad request",
- ),
- ]
-
- ex_op, _, _ = make_extended_operation(responses)
-
- # Defaults to CallError when grpc is not installed
- with pytest.raises(exceptions.BadRequest):
- ex_op.result()
-
- # Test GCE custom LRO Error. See b/284179390
- # Note: This test case can be removed once proposal A from
- # b/284179390 is implemented.
- _EXCEPTION_CODE = "INCOMPATIBLE_BACKEND_SERVICES"
- _EXCEPTION_MESSAGE = "Validation failed for instance group"
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.DONE,
- error_code=400,
- error_message="Bad request",
- error=CustomOperation.LROCustomErrors(
- errors=[
- CustomOperation.LROCustomErrors.LROCustomError(
- code=_EXCEPTION_CODE, message=_EXCEPTION_MESSAGE
- )
- ]
- ),
- ),
- ]
-
- ex_op, _, _ = make_extended_operation(responses)
-
- # Defaults to CallError when grpc is not installed
- with pytest.raises(
- exceptions.BadRequest, match=f"{_EXCEPTION_CODE}: {_EXCEPTION_MESSAGE}"
- ):
- ex_op.result()
-
- # Inconsistent result
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.DONE,
- error_code=2112,
- ),
- ]
-
- ex_op, _, _ = make_extended_operation(responses)
-
- with pytest.raises(exceptions.GoogleAPICallError):
- ex_op.result()
-
-
-def test_pass_through():
- responses = [
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.PENDING,
- armor_class=10,
- ),
- CustomOperation(
- name=TEST_OPERATION_NAME,
- status=CustomOperation.StatusCode.DONE,
- armor_class=20,
- ),
- ]
- ex_op, _, _ = make_extended_operation(responses)
-
- assert ex_op.armor_class == 10
- ex_op.result()
- assert ex_op.armor_class == 20
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
deleted file mode 100644
index e05d396..0000000
--- a/tests/unit/test_grpc_helpers.py
+++ /dev/null
@@ -1,927 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from unittest import mock
-
-import pytest
-from ..helpers import warn_deprecated_credentials_file
-
-try:
- import grpc
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core import grpc_helpers
-import google.auth.credentials
-from google.longrunning import operations_pb2
-
-
-def test__patch_callable_name():
- callable = mock.Mock(spec=["__class__"])
- callable.__class__ = mock.Mock(spec=["__name__"])
- callable.__class__.__name__ = "TestCallable"
-
- grpc_helpers._patch_callable_name(callable)
-
- assert callable.__name__ == "TestCallable"
-
-
-def test__patch_callable_name_no_op():
- callable = mock.Mock(spec=["__name__"])
- callable.__name__ = "test_callable"
-
- grpc_helpers._patch_callable_name(callable)
-
- assert callable.__name__ == "test_callable"
-
-
-class RpcErrorImpl(grpc.RpcError, grpc.Call):
- def __init__(self, code):
- super(RpcErrorImpl, self).__init__()
- self._code = code
-
- def code(self):
- return self._code
-
- def details(self):
- return None
-
- def trailing_metadata(self):
- return None
-
-
-def test_wrap_unary_errors():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
- callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
-
- wrapped_callable = grpc_helpers._wrap_unary_errors(callable_)
-
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- wrapped_callable(1, 2, three="four")
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert exc_info.value.response == grpc_error
-
-
-class Test_StreamingResponseIterator:
- @staticmethod
- def _make_wrapped(*items):
- return iter(items)
-
- @staticmethod
- def _make_one(wrapped, **kw):
- return grpc_helpers._StreamingResponseIterator(wrapped, **kw)
-
- def test_ctor_defaults(self):
- wrapped = self._make_wrapped("a", "b", "c")
- iterator = self._make_one(wrapped)
- assert iterator._stored_first_result == "a"
- assert list(wrapped) == ["b", "c"]
-
- def test_ctor_explicit(self):
- wrapped = self._make_wrapped("a", "b", "c")
- iterator = self._make_one(wrapped, prefetch_first_result=False)
- assert getattr(iterator, "_stored_first_result", self) is self
- assert list(wrapped) == ["a", "b", "c"]
-
- def test_ctor_w_rpc_error_on_prefetch(self):
- wrapped = mock.MagicMock()
- wrapped.__next__.side_effect = grpc.RpcError()
-
- with pytest.raises(grpc.RpcError):
- self._make_one(wrapped)
-
- def test___iter__(self):
- wrapped = self._make_wrapped("a", "b", "c")
- iterator = self._make_one(wrapped)
- assert iter(iterator) is iterator
-
- def test___next___w_cached_first_result(self):
- wrapped = self._make_wrapped("a", "b", "c")
- iterator = self._make_one(wrapped)
- assert next(iterator) == "a"
- iterator = self._make_one(wrapped, prefetch_first_result=False)
- assert next(iterator) == "b"
- assert next(iterator) == "c"
-
- def test___next___wo_cached_first_result(self):
- wrapped = self._make_wrapped("a", "b", "c")
- iterator = self._make_one(wrapped, prefetch_first_result=False)
- assert next(iterator) == "a"
- assert next(iterator) == "b"
- assert next(iterator) == "c"
-
- def test___next___w_rpc_error(self):
- wrapped = mock.MagicMock()
- wrapped.__next__.side_effect = grpc.RpcError()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- with pytest.raises(exceptions.GoogleAPICallError):
- next(iterator)
-
- def test_add_callback(self):
- wrapped = mock.MagicMock()
- callback = mock.Mock(spec={})
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.add_callback(callback) is wrapped.add_callback.return_value
-
- wrapped.add_callback.assert_called_once_with(callback)
-
- def test_cancel(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.cancel() is wrapped.cancel.return_value
-
- wrapped.cancel.assert_called_once_with()
-
- def test_code(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.code() is wrapped.code.return_value
-
- wrapped.code.assert_called_once_with()
-
- def test_details(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.details() is wrapped.details.return_value
-
- wrapped.details.assert_called_once_with()
-
- def test_initial_metadata(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.initial_metadata() is wrapped.initial_metadata.return_value
-
- wrapped.initial_metadata.assert_called_once_with()
-
- def test_is_active(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.is_active() is wrapped.is_active.return_value
-
- wrapped.is_active.assert_called_once_with()
-
- def test_time_remaining(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.time_remaining() is wrapped.time_remaining.return_value
-
- wrapped.time_remaining.assert_called_once_with()
-
- def test_trailing_metadata(self):
- wrapped = mock.MagicMock()
- iterator = self._make_one(wrapped, prefetch_first_result=False)
-
- assert iterator.trailing_metadata() is wrapped.trailing_metadata.return_value
-
- wrapped.trailing_metadata.assert_called_once_with()
-
-
-class TestGrpcStream(Test_StreamingResponseIterator):
- @staticmethod
- def _make_one(wrapped, **kw):
- return grpc_helpers.GrpcStream(wrapped, **kw)
-
- def test_grpc_stream_attributes(self):
- """
- Should be both a grpc.Call and an iterable
- """
- call = self._make_one(None)
- assert isinstance(call, grpc.Call)
- # should implement __iter__
- assert hasattr(call, "__iter__")
- it = call.__iter__()
- assert hasattr(it, "__next__")
-
-
-def test_wrap_stream_okay():
- expected_responses = [1, 2, 3]
- callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
-
- got_iterator = wrapped_callable(1, 2, three="four")
-
- responses = list(got_iterator)
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert responses == expected_responses
-
-
-def test_wrap_stream_prefetch_disabled():
- responses = [1, 2, 3]
- iter_responses = iter(responses)
- callable_ = mock.Mock(spec=["__call__"], return_value=iter_responses)
- callable_._prefetch_first_result_ = False
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
- wrapped_callable(1, 2, three="four")
-
- assert list(iter_responses) == responses # no items should have been pre-fetched
- callable_.assert_called_once_with(1, 2, three="four")
-
-
-def test_wrap_stream_iterable_interface():
- response_iter = mock.create_autospec(grpc.Call, instance=True)
- callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
-
- got_iterator = wrapped_callable()
-
- callable_.assert_called_once_with()
-
- # Check each aliased method in the grpc.Call interface
- got_iterator.add_callback(mock.sentinel.callback)
- response_iter.add_callback.assert_called_once_with(mock.sentinel.callback)
-
- got_iterator.cancel()
- response_iter.cancel.assert_called_once_with()
-
- got_iterator.code()
- response_iter.code.assert_called_once_with()
-
- got_iterator.details()
- response_iter.details.assert_called_once_with()
-
- got_iterator.initial_metadata()
- response_iter.initial_metadata.assert_called_once_with()
-
- got_iterator.is_active()
- response_iter.is_active.assert_called_once_with()
-
- got_iterator.time_remaining()
- response_iter.time_remaining.assert_called_once_with()
-
- got_iterator.trailing_metadata()
- response_iter.trailing_metadata.assert_called_once_with()
-
-
-def test_wrap_stream_errors_invocation():
- grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
- callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
-
- with pytest.raises(exceptions.InvalidArgument) as exc_info:
- wrapped_callable(1, 2, three="four")
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert exc_info.value.response == grpc_error
-
-
-def test_wrap_stream_empty_iterator():
- expected_responses = []
- callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
-
- got_iterator = wrapped_callable()
-
- responses = list(got_iterator)
-
- callable_.assert_called_once_with()
- assert responses == expected_responses
-
-
-class RpcResponseIteratorImpl(object):
- def __init__(self, iterable):
- self._iterable = iter(iterable)
-
- def next(self):
- next_item = next(self._iterable)
- if isinstance(next_item, RpcErrorImpl):
- raise next_item
- return next_item
-
- __next__ = next
-
-
-def test_wrap_stream_errors_iterator_initialization():
- grpc_error = RpcErrorImpl(grpc.StatusCode.UNAVAILABLE)
- response_iter = RpcResponseIteratorImpl([grpc_error])
- callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
-
- with pytest.raises(exceptions.ServiceUnavailable) as exc_info:
- wrapped_callable(1, 2, three="four")
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert exc_info.value.response == grpc_error
-
-
-def test_wrap_stream_errors_during_iteration():
- grpc_error = RpcErrorImpl(grpc.StatusCode.UNAVAILABLE)
- response_iter = RpcResponseIteratorImpl([1, grpc_error])
- callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
-
- wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
- got_iterator = wrapped_callable(1, 2, three="four")
- next(got_iterator)
-
- with pytest.raises(exceptions.ServiceUnavailable) as exc_info:
- next(got_iterator)
-
- callable_.assert_called_once_with(1, 2, three="four")
- assert exc_info.value.response == grpc_error
-
-
-@mock.patch("google.api_core.grpc_helpers._wrap_unary_errors")
-def test_wrap_errors_non_streaming(wrap_unary_errors):
- callable_ = mock.create_autospec(grpc.UnaryUnaryMultiCallable)
-
- result = grpc_helpers.wrap_errors(callable_)
-
- assert result == wrap_unary_errors.return_value
- wrap_unary_errors.assert_called_once_with(callable_)
-
-
-@mock.patch("google.api_core.grpc_helpers._wrap_stream_errors")
-def test_wrap_errors_streaming(wrap_stream_errors):
- callable_ = mock.create_autospec(grpc.UnaryStreamMultiCallable)
-
- result = grpc_helpers.wrap_errors(callable_)
-
- assert result == wrap_stream_errors.return_value
- wrap_stream_errors.assert_called_once_with(callable_)
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path,target,expected_target",
- [
- (None, "example.com:443", "example.com:443"),
- (False, "example.com:443", "example.com:443"),
- (True, "example.com:443", "google-c2p:///example.com"),
- (True, "dns:///example.com", "google-c2p:///example.com"),
- (True, "another-c2p:///example.com", "another-c2p:///example.com"),
- ],
-)
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit(
- grpc_secure_channel,
- google_auth_default,
- composite_creds_call,
- attempt_direct_path,
- target,
- expected_target,
-):
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(
- target,
- compression=grpc.Compression.Gzip,
- attempt_direct_path=attempt_direct_path,
- )
-
- assert channel is grpc_secure_channel.return_value
-
- google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
-
- grpc_secure_channel.assert_called_once_with(
- expected_target, composite_creds, compression=grpc.Compression.Gzip
- )
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path,target, expected_target",
- [
- (None, "example.com:443", "example.com:443"),
- (False, "example.com:443", "example.com:443"),
- (True, "example.com:443", "google-c2p:///example.com"),
- (True, "dns:///example.com", "google-c2p:///example.com"),
- (True, "another-c2p:///example.com", "another-c2p:///example.com"),
- ],
-)
-@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
-@mock.patch(
- "google.auth.transport.requests.Request",
- autospec=True,
- return_value=mock.sentinel.Request,
-)
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit_with_default_host(
- grpc_secure_channel,
- google_auth_default,
- composite_creds_call,
- request,
- auth_metadata_plugin,
- attempt_direct_path,
- target,
- expected_target,
-):
- default_host = "example.com"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(
- target, default_host=default_host, attempt_direct_path=attempt_direct_path
- )
-
- assert channel is grpc_secure_channel.return_value
-
- google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
- auth_metadata_plugin.assert_called_once_with(
- mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
- )
-
- grpc_secure_channel.assert_called_once_with(
- expected_target, composite_creds, compression=None
- )
-
-
-@pytest.mark.parametrize(
- "attempt_direct_path",
- [
- None,
- False,
- ],
-)
-@mock.patch("grpc.composite_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call, attempt_direct_path
-):
- target = "example.com:443"
-
- ssl_creds = grpc.ssl_channel_credentials()
-
- grpc_helpers.create_channel(
- target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
- )
-
- default.assert_called_once_with(scopes=None, default_scopes=None)
-
- composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
- composite_creds = composite_creds_call.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
- target = "example.com:443"
- ssl_creds = grpc.ssl_channel_credentials()
- with pytest.raises(
- ValueError, match="Using ssl_credentials with Direct Path is not supported"
- ):
- grpc_helpers.create_channel(
- target, ssl_credentials=ssl_creds, attempt_direct_path=True
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit_with_scopes(
- grpc_secure_channel, default, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(target, scopes=["one", "two"])
-
- assert channel is grpc_secure_channel.return_value
-
- default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch(
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit_with_default_scopes(
- grpc_secure_channel, default, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(target, default_scopes=["three", "four"])
-
- assert channel is grpc_secure_channel.return_value
-
- default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-def test_create_channel_explicit_with_duplicate_credentials():
- target = "example.com:443"
-
- with pytest.raises(exceptions.DuplicateCredentialArgs):
- with warn_deprecated_credentials_file():
- grpc_helpers.create_channel(
- target,
- credentials_file="credentials.json",
- credentials=mock.sentinel.credentials,
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
-@mock.patch("grpc.secure_channel")
-def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(target, credentials=mock.sentinel.credentials)
-
- auth_creds.assert_called_once_with(
- mock.sentinel.credentials, scopes=None, default_scopes=None
- )
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
- target = "example.com:443"
- scopes = ["1", "2"]
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- channel = grpc_helpers.create_channel(
- target, credentials=credentials, scopes=scopes
- )
-
- credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-def test_create_channel_explicit_default_scopes(
- grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- default_scopes = ["3", "4"]
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- channel = grpc_helpers.create_channel(
- target, credentials=credentials, default_scopes=default_scopes
- )
-
- credentials.with_scopes.assert_called_once_with(
- scopes=None, default_scopes=default_scopes
- )
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-def test_create_channel_explicit_with_quota_project(
- grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- composite_creds = composite_creds_call.return_value
-
- credentials = mock.create_autospec(
- google.auth.credentials.CredentialsWithQuotaProject, instance=True
- )
-
- channel = grpc_helpers.create_channel(
- target, credentials=credentials, quota_project_id="project-foo"
- )
-
- credentials.with_quota_project.assert_called_once_with("project-foo")
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers.create_channel(target, credentials_file=credentials_file)
-
- google.auth.load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=None, default_scopes=None
- )
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file_and_scopes(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- scopes = ["1", "2"]
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers.create_channel(
- target, credentials_file=credentials_file, scopes=scopes
- )
-
- google.auth.load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=scopes, default_scopes=None
- )
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.compute_engine_channel_credentials")
-@mock.patch("grpc.secure_channel")
-@mock.patch(
- "google.auth.load_credentials_from_file",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.project),
-)
-def test_create_channel_with_credentials_file_and_default_scopes(
- load_credentials_from_file, grpc_secure_channel, composite_creds_call
-):
- target = "example.com:443"
- default_scopes = ["3", "4"]
-
- credentials_file = "/path/to/credentials/file.json"
- composite_creds = composite_creds_call.return_value
-
- with warn_deprecated_credentials_file():
- channel = grpc_helpers.create_channel(
- target, credentials_file=credentials_file, default_scopes=default_scopes
- )
-
- load_credentials_from_file.assert_called_once_with(
- credentials_file, scopes=None, default_scopes=default_scopes
- )
-
- assert channel is grpc_secure_channel.return_value
-
- grpc_secure_channel.assert_called_once_with(
- target, composite_creds, compression=None
- )
-
-
-@mock.patch("grpc.secure_channel")
-def test_create_channel_without_grpc_gcp(grpc_secure_channel):
- target = "example.com:443"
- scopes = ["test_scope"]
-
- credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
- credentials.requires_scopes = True
-
- grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
- grpc_secure_channel.assert_called()
-
- credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
-
-
-class TestChannelStub(object):
- def test_single_response(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
- expected_response = operations_pb2.Operation(name="moop")
-
- channel.GetOperation.response = expected_response
-
- response = stub.GetOperation(expected_request)
-
- assert response == expected_response
- assert channel.requests == [("GetOperation", expected_request)]
- assert channel.GetOperation.requests == [expected_request]
-
- def test_no_response(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
-
- with pytest.raises(ValueError) as exc_info:
- stub.GetOperation(expected_request)
-
- assert exc_info.match("GetOperation")
-
- def test_missing_method(self):
- channel = grpc_helpers.ChannelStub()
-
- with pytest.raises(AttributeError):
- channel.DoesNotExist.response
-
- def test_exception_response(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
-
- channel.GetOperation.response = RuntimeError()
-
- with pytest.raises(RuntimeError):
- stub.GetOperation(expected_request)
-
- def test_callable_response(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
- expected_response = operations_pb2.Operation(name="moop")
-
- on_get_operation = mock.Mock(spec=("__call__",), return_value=expected_response)
-
- channel.GetOperation.response = on_get_operation
-
- response = stub.GetOperation(expected_request)
-
- assert response == expected_response
- on_get_operation.assert_called_once_with(expected_request)
-
- def test_multiple_responses(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
- expected_responses = [
- operations_pb2.Operation(name="foo"),
- operations_pb2.Operation(name="bar"),
- operations_pb2.Operation(name="baz"),
- ]
-
- channel.GetOperation.responses = iter(expected_responses)
-
- response1 = stub.GetOperation(expected_request)
- response2 = stub.GetOperation(expected_request)
- response3 = stub.GetOperation(expected_request)
-
- assert response1 == expected_responses[0]
- assert response2 == expected_responses[1]
- assert response3 == expected_responses[2]
- assert channel.requests == [("GetOperation", expected_request)] * 3
- assert channel.GetOperation.requests == [expected_request] * 3
-
- with pytest.raises(StopIteration):
- stub.GetOperation(expected_request)
-
- def test_multiple_responses_and_single_response_error(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- channel.GetOperation.responses = []
- channel.GetOperation.response = mock.sentinel.response
-
- with pytest.raises(ValueError):
- stub.GetOperation(operations_pb2.GetOperationRequest())
-
- def test_call_info(self):
- channel = grpc_helpers.ChannelStub()
- stub = operations_pb2.OperationsStub(channel)
- expected_request = operations_pb2.GetOperationRequest(name="meep")
- expected_response = operations_pb2.Operation(name="moop")
- expected_compression = grpc.Compression.NoCompression
- expected_metadata = [("red", "blue"), ("two", "shoe")]
- expected_credentials = mock.sentinel.credentials
- channel.GetOperation.response = expected_response
-
- response = stub.GetOperation(
- expected_request,
- timeout=42,
- compression=expected_compression,
- metadata=expected_metadata,
- credentials=expected_credentials,
- )
-
- assert response == expected_response
- assert channel.requests == [("GetOperation", expected_request)]
- assert channel.GetOperation.calls == [
- (
- expected_request,
- 42,
- expected_metadata,
- expected_credentials,
- expected_compression,
- )
- ]
-
- def test_unary_unary(self):
- channel = grpc_helpers.ChannelStub()
- method_name = "GetOperation"
- callable_stub = channel.unary_unary(method_name)
- assert callable_stub._method == method_name
- assert callable_stub._channel == channel
-
- def test_unary_stream(self):
- channel = grpc_helpers.ChannelStub()
- method_name = "GetOperation"
- callable_stub = channel.unary_stream(method_name)
- assert callable_stub._method == method_name
- assert callable_stub._channel == channel
-
- def test_stream_unary(self):
- channel = grpc_helpers.ChannelStub()
- method_name = "GetOperation"
- callable_stub = channel.stream_unary(method_name)
- assert callable_stub._method == method_name
- assert callable_stub._channel == channel
-
- def test_stream_stream(self):
- channel = grpc_helpers.ChannelStub()
- method_name = "GetOperation"
- callable_stub = channel.stream_stream(method_name)
- assert callable_stub._method == method_name
- assert callable_stub._channel == channel
-
- def test_subscribe_unsubscribe(self):
- channel = grpc_helpers.ChannelStub()
- assert channel.subscribe(None) is None
- assert channel.unsubscribe(None) is None
-
- def test_close(self):
- channel = grpc_helpers.ChannelStub()
- assert channel.close() is None
diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py
deleted file mode 100644
index 3de1528..0000000
--- a/tests/unit/test_iam.py
+++ /dev/null
@@ -1,386 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from google.api_core.iam import _DICT_ACCESS_MSG, InvalidOperationException
-
-
-class TestPolicy:
- @staticmethod
- def _get_target_class():
- from google.api_core.iam import Policy
-
- return Policy
-
- def _make_one(self, *args, **kw):
- return self._get_target_class()(*args, **kw)
-
- def test_ctor_defaults(self):
- empty = frozenset()
- policy = self._make_one()
- assert policy.etag is None
- assert policy.version is None
- assert policy.owners == empty
- assert policy.editors == empty
- assert policy.viewers == empty
- assert len(policy) == 0
- assert dict(policy) == {}
-
- def test_ctor_explicit(self):
- VERSION = 1
- ETAG = "ETAG"
- empty = frozenset()
- policy = self._make_one(ETAG, VERSION)
- assert policy.etag == ETAG
- assert policy.version == VERSION
- assert policy.owners == empty
- assert policy.editors == empty
- assert policy.viewers == empty
- assert len(policy) == 0
- assert dict(policy) == {}
-
- def test___getitem___miss(self):
- policy = self._make_one()
- assert policy["nonesuch"] == set()
-
- def test__getitem___and_set(self):
- from google.api_core.iam import OWNER_ROLE
-
- policy = self._make_one()
-
- # get the policy using the getter and then modify it
- policy[OWNER_ROLE].add("user:phred@example.com")
- assert dict(policy) == {OWNER_ROLE: {"user:phred@example.com"}}
-
- def test___getitem___version3(self):
- policy = self._make_one("DEADBEEF", 3)
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- policy["role"]
-
- def test___getitem___with_conditions(self):
- USER = "user:phred@example.com"
- CONDITION = {"expression": "2 > 1"}
- policy = self._make_one("DEADBEEF", 1)
- policy.bindings = [
- {"role": "role/reader", "members": [USER], "condition": CONDITION}
- ]
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- policy["role/reader"]
-
- def test___setitem__(self):
- USER = "user:phred@example.com"
- PRINCIPALS = set([USER])
- policy = self._make_one()
- policy["rolename"] = [USER]
- assert policy["rolename"] == PRINCIPALS
- assert len(policy) == 1
- assert dict(policy) == {"rolename": PRINCIPALS}
-
- def test__set_item__overwrite(self):
- GROUP = "group:test@group.com"
- USER = "user:phred@example.com"
- ALL_USERS = "allUsers"
- MEMBERS = set([ALL_USERS])
- GROUPS = set([GROUP])
- policy = self._make_one()
- policy["first"] = [GROUP]
- policy["second"] = [USER]
- policy["second"] = [ALL_USERS]
- assert policy["second"] == MEMBERS
- assert len(policy) == 2
- assert dict(policy) == {"first": GROUPS, "second": MEMBERS}
-
- def test___setitem___version3(self):
- policy = self._make_one("DEADBEEF", 3)
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- policy["role/reader"] = ["user:phred@example.com"]
-
- def test___setitem___with_conditions(self):
- USER = "user:phred@example.com"
- CONDITION = {"expression": "2 > 1"}
- policy = self._make_one("DEADBEEF", 1)
- policy.bindings = [
- {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
- ]
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- policy["role/reader"] = ["user:phred@example.com"]
-
- def test___delitem___hit(self):
- policy = self._make_one()
- policy.bindings = [
- {"role": "to/keep", "members": set(["phred@example.com"])},
- {"role": "to/remove", "members": set(["phred@example.com"])},
- ]
- del policy["to/remove"]
- assert len(policy) == 1
- assert dict(policy) == {"to/keep": set(["phred@example.com"])}
-
- def test___delitem___miss(self):
- policy = self._make_one()
- with pytest.raises(KeyError):
- del policy["nonesuch"]
-
- def test___delitem___version3(self):
- policy = self._make_one("DEADBEEF", 3)
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- del policy["role/reader"]
-
- def test___delitem___with_conditions(self):
- USER = "user:phred@example.com"
- CONDITION = {"expression": "2 > 1"}
- policy = self._make_one("DEADBEEF", 1)
- policy.bindings = [
- {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
- ]
- with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
- del policy["role/reader"]
-
- def test_bindings_property(self):
- USER = "user:phred@example.com"
- CONDITION = {"expression": "2 > 1"}
- policy = self._make_one()
- BINDINGS = [
- {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
- ]
- policy.bindings = BINDINGS
- assert policy.bindings == BINDINGS
-
- def test_owners_getter(self):
- from google.api_core.iam import OWNER_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = frozenset([MEMBER])
- policy = self._make_one()
- policy[OWNER_ROLE] = [MEMBER]
- assert policy.owners == expected
-
- def test_owners_setter(self):
- from google.api_core.iam import OWNER_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = set([MEMBER])
- policy = self._make_one()
-
- with pytest.warns(
- DeprecationWarning, match="Assigning to 'owners' is deprecated."
- ) as warned:
- policy.owners = [MEMBER]
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
- assert policy[OWNER_ROLE] == expected
-
- def test_editors_getter(self):
- from google.api_core.iam import EDITOR_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = frozenset([MEMBER])
- policy = self._make_one()
- policy[EDITOR_ROLE] = [MEMBER]
- assert policy.editors == expected
-
- def test_editors_setter(self):
- from google.api_core.iam import EDITOR_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = set([MEMBER])
- policy = self._make_one()
-
- with pytest.warns(
- DeprecationWarning, match="Assigning to 'editors' is deprecated."
- ) as warned:
- policy.editors = [MEMBER]
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
- assert policy[EDITOR_ROLE] == expected
-
- def test_viewers_getter(self):
- from google.api_core.iam import VIEWER_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = frozenset([MEMBER])
- policy = self._make_one()
- policy[VIEWER_ROLE] = [MEMBER]
- assert policy.viewers == expected
-
- def test_viewers_setter(self):
- from google.api_core.iam import VIEWER_ROLE
-
- MEMBER = "user:phred@example.com"
- expected = set([MEMBER])
- policy = self._make_one()
-
- with pytest.warns(
- DeprecationWarning, match="Assigning to 'viewers' is deprecated."
- ) as warned:
- policy.viewers = [MEMBER]
-
- (warning,) = warned
- assert warning.category is DeprecationWarning
- assert policy[VIEWER_ROLE] == expected
-
- def test_user(self):
- EMAIL = "phred@example.com"
- MEMBER = "user:%s" % (EMAIL,)
- policy = self._make_one()
- assert policy.user(EMAIL) == MEMBER
-
- def test_service_account(self):
- EMAIL = "phred@example.com"
- MEMBER = "serviceAccount:%s" % (EMAIL,)
- policy = self._make_one()
- assert policy.service_account(EMAIL) == MEMBER
-
- def test_group(self):
- EMAIL = "phred@example.com"
- MEMBER = "group:%s" % (EMAIL,)
- policy = self._make_one()
- assert policy.group(EMAIL) == MEMBER
-
- def test_domain(self):
- DOMAIN = "example.com"
- MEMBER = "domain:%s" % (DOMAIN,)
- policy = self._make_one()
- assert policy.domain(DOMAIN) == MEMBER
-
- def test_all_users(self):
- policy = self._make_one()
- assert policy.all_users() == "allUsers"
-
- def test_authenticated_users(self):
- policy = self._make_one()
- assert policy.authenticated_users() == "allAuthenticatedUsers"
-
- def test_from_api_repr_only_etag(self):
- empty = frozenset()
- RESOURCE = {"etag": "ACAB"}
- klass = self._get_target_class()
- policy = klass.from_api_repr(RESOURCE)
- assert policy.etag == "ACAB"
- assert policy.version is None
- assert policy.owners == empty
- assert policy.editors == empty
- assert policy.viewers == empty
- assert dict(policy) == {}
-
- def test_from_api_repr_complete(self):
- from google.api_core.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
-
- OWNER1 = "group:cloud-logs@google.com"
- OWNER2 = "user:phred@example.com"
- EDITOR1 = "domain:google.com"
- EDITOR2 = "user:phred@example.com"
- VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
- VIEWER2 = "user:phred@example.com"
- RESOURCE = {
- "etag": "DEADBEEF",
- "version": 1,
- "bindings": [
- {"role": OWNER_ROLE, "members": [OWNER1, OWNER2]},
- {"role": EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
- {"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
- ],
- }
- klass = self._get_target_class()
- policy = klass.from_api_repr(RESOURCE)
- assert policy.etag == "DEADBEEF"
- assert policy.version == 1
- assert policy.owners, frozenset([OWNER1 == OWNER2])
- assert policy.editors, frozenset([EDITOR1 == EDITOR2])
- assert policy.viewers, frozenset([VIEWER1 == VIEWER2])
- assert dict(policy) == {
- OWNER_ROLE: set([OWNER1, OWNER2]),
- EDITOR_ROLE: set([EDITOR1, EDITOR2]),
- VIEWER_ROLE: set([VIEWER1, VIEWER2]),
- }
- assert policy.bindings == [
- {"role": OWNER_ROLE, "members": set([OWNER1, OWNER2])},
- {"role": EDITOR_ROLE, "members": set([EDITOR1, EDITOR2])},
- {"role": VIEWER_ROLE, "members": set([VIEWER1, VIEWER2])},
- ]
-
- def test_from_api_repr_unknown_role(self):
- USER = "user:phred@example.com"
- GROUP = "group:cloud-logs@google.com"
- RESOURCE = {
- "etag": "DEADBEEF",
- "version": 1,
- "bindings": [{"role": "unknown", "members": [USER, GROUP]}],
- }
- klass = self._get_target_class()
- policy = klass.from_api_repr(RESOURCE)
- assert policy.etag == "DEADBEEF"
- assert policy.version == 1
- assert dict(policy), {"unknown": set([GROUP == USER])}
-
- def test_to_api_repr_defaults(self):
- policy = self._make_one()
- assert policy.to_api_repr() == {}
-
- def test_to_api_repr_only_etag(self):
- policy = self._make_one("DEADBEEF")
- assert policy.to_api_repr() == {"etag": "DEADBEEF"}
-
- def test_to_api_repr_binding_wo_members(self):
- policy = self._make_one()
- policy["empty"] = []
- assert policy.to_api_repr() == {}
-
- def test_to_api_repr_binding_w_duplicates(self):
- from google.api_core.iam import OWNER_ROLE
-
- OWNER = "group:cloud-logs@google.com"
- policy = self._make_one()
- with pytest.warns(
- DeprecationWarning, match="Assigning to 'owners' is deprecated."
- ):
- policy.owners = [OWNER, OWNER]
- assert policy.to_api_repr() == {
- "bindings": [{"role": OWNER_ROLE, "members": [OWNER]}]
- }
-
- def test_to_api_repr_full(self):
- import operator
- from google.api_core.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
-
- OWNER1 = "group:cloud-logs@google.com"
- OWNER2 = "user:phred@example.com"
- EDITOR1 = "domain:google.com"
- EDITOR2 = "user:phred@example.com"
- VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
- VIEWER2 = "user:phred@example.com"
- CONDITION = {
- "title": "title",
- "description": "description",
- "expression": "true",
- }
- BINDINGS = [
- {"role": OWNER_ROLE, "members": [OWNER1, OWNER2]},
- {"role": EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
- {"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
- {
- "role": VIEWER_ROLE,
- "members": [VIEWER1, VIEWER2],
- "condition": CONDITION,
- },
- ]
- policy = self._make_one("DEADBEEF", 1)
- policy.bindings = BINDINGS
- resource = policy.to_api_repr()
- assert resource["etag"] == "DEADBEEF"
- assert resource["version"] == 1
- key = operator.itemgetter("role")
- assert sorted(resource["bindings"], key=key) == sorted(BINDINGS, key=key)
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
deleted file mode 100644
index 8068072..0000000
--- a/tests/unit/test_operation.py
+++ /dev/null
@@ -1,327 +0,0 @@
-# Copyright 2017, Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from unittest import mock
-
-import pytest
-
-try:
- import grpc # noqa: F401
-except ImportError: # pragma: NO COVER
- pytest.skip("No GRPC", allow_module_level=True)
-
-from google.api_core import exceptions
-from google.api_core import operation
-from google.api_core import operations_v1
-from google.api_core import retry
-from google.longrunning import operations_pb2
-from google.protobuf import struct_pb2
-from google.rpc import code_pb2
-from google.rpc import status_pb2
-
-TEST_OPERATION_NAME = "test/operation"
-
-
-def make_operation_proto(
- name=TEST_OPERATION_NAME, metadata=None, response=None, error=None, **kwargs
-):
- operation_proto = operations_pb2.Operation(name=name, **kwargs)
-
- if metadata is not None:
- operation_proto.metadata.Pack(metadata)
-
- if response is not None:
- operation_proto.response.Pack(response)
-
- if error is not None:
- operation_proto.error.CopyFrom(error)
-
- return operation_proto
-
-
-def make_operation_future(client_operations_responses=None):
- if client_operations_responses is None:
- client_operations_responses = [make_operation_proto()]
-
- refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
- refresh.responses = client_operations_responses
- cancel = mock.Mock(spec=["__call__"])
- operation_future = operation.Operation(
- client_operations_responses[0],
- refresh,
- cancel,
- result_type=struct_pb2.Struct,
- metadata_type=struct_pb2.Struct,
- )
-
- return operation_future, refresh, cancel
-
-
-def test_constructor():
- future, refresh, _ = make_operation_future()
-
- assert future.operation == refresh.responses[0]
- assert future.operation.done is False
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.metadata is None
- assert future.running()
-
-
-def test_metadata():
- expected_metadata = struct_pb2.Struct()
- future, _, _ = make_operation_future(
- [make_operation_proto(metadata=expected_metadata)]
- )
-
- assert future.metadata == expected_metadata
-
-
-def test_cancellation():
- responses = [
- make_operation_proto(),
- # Second response indicates that the operation was cancelled.
- make_operation_proto(
- done=True, error=status_pb2.Status(code=code_pb2.CANCELLED)
- ),
- ]
- future, _, cancel = make_operation_future(responses)
-
- assert future.cancel()
- assert future.cancelled()
- cancel.assert_called_once_with()
-
- # Cancelling twice should have no effect.
- assert not future.cancel()
- cancel.assert_called_once_with()
-
-
-def test_result():
- expected_result = struct_pb2.Struct()
- responses = [
- make_operation_proto(),
- # Second operation response includes the result.
- make_operation_proto(done=True, response=expected_result),
- ]
- future, _, _ = make_operation_future(responses)
-
- result = future.result()
-
- assert result == expected_result
- assert future.done()
-
-
-def test_done_w_retry():
- RETRY_PREDICATE = retry.if_exception_type(exceptions.TooManyRequests)
- test_retry = retry.Retry(predicate=RETRY_PREDICATE)
-
- expected_result = struct_pb2.Struct()
- responses = [
- make_operation_proto(),
- # Second operation response includes the result.
- make_operation_proto(done=True, response=expected_result),
- ]
- future, _, _ = make_operation_future(responses)
- future._refresh = mock.Mock()
-
- future.done(retry=test_retry)
- future._refresh.assert_called_once_with(retry=test_retry)
-
-
-def test_exception():
- expected_exception = status_pb2.Status(message="meep")
- responses = [
- make_operation_proto(),
- # Second operation response includes the error.
- make_operation_proto(done=True, error=expected_exception),
- ]
- future, _, _ = make_operation_future(responses)
-
- exception = future.exception()
-
- assert expected_exception.message in "{!r}".format(exception)
-
-
-def test_exception_with_error_code():
- expected_exception = status_pb2.Status(message="meep", code=5)
- responses = [
- make_operation_proto(),
- # Second operation response includes the error.
- make_operation_proto(done=True, error=expected_exception),
- ]
- future, _, _ = make_operation_future(responses)
-
- exception = future.exception()
-
- assert expected_exception.message in "{!r}".format(exception)
- # Status Code 5 maps to Not Found
- # https://developers.google.com/maps-booking/reference/grpc-api/status_codes
- assert isinstance(exception, exceptions.NotFound)
-
-
-def test_unexpected_result():
- responses = [
- make_operation_proto(),
- # Second operation response is done, but has not error or response.
- make_operation_proto(done=True),
- ]
- future, _, _ = make_operation_future(responses)
-
- exception = future.exception()
-
- assert "Unexpected state" in "{!r}".format(exception)
-
-
-def test__refresh_http():
- json_response = {"name": TEST_OPERATION_NAME, "done": True}
- api_request = mock.Mock(return_value=json_response)
-
- result = operation._refresh_http(api_request, TEST_OPERATION_NAME)
-
- assert isinstance(result, operations_pb2.Operation)
- assert result.name == TEST_OPERATION_NAME
- assert result.done is True
-
- api_request.assert_called_once_with(
- method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
- )
-
-
-def test__refresh_http_w_retry():
- json_response = {"name": TEST_OPERATION_NAME, "done": True}
- api_request = mock.Mock()
- retry = mock.Mock()
- retry.return_value.return_value = json_response
-
- result = operation._refresh_http(api_request, TEST_OPERATION_NAME, retry=retry)
-
- assert isinstance(result, operations_pb2.Operation)
- assert result.name == TEST_OPERATION_NAME
- assert result.done is True
-
- api_request.assert_not_called()
- retry.assert_called_once_with(api_request)
- retry.return_value.assert_called_once_with(
- method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
- )
-
-
-def test__cancel_http():
- api_request = mock.Mock()
-
- operation._cancel_http(api_request, TEST_OPERATION_NAME)
-
- api_request.assert_called_once_with(
- method="POST", path="operations/{}:cancel".format(TEST_OPERATION_NAME)
- )
-
-
-def test_from_http_json():
- operation_json = {"name": TEST_OPERATION_NAME, "done": True}
- api_request = mock.sentinel.api_request
-
- future = operation.from_http_json(
- operation_json, api_request, struct_pb2.Struct, metadata_type=struct_pb2.Struct
- )
-
- assert future._result_type == struct_pb2.Struct
- assert future._metadata_type == struct_pb2.Struct
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.done
-
-
-def test__refresh_grpc():
- operations_stub = mock.Mock(spec=["GetOperation"])
- expected_result = make_operation_proto(done=True)
- operations_stub.GetOperation.return_value = expected_result
-
- result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME)
-
- assert result == expected_result
- expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
- operations_stub.GetOperation.assert_called_once_with(expected_request)
-
-
-def test__refresh_grpc_w_retry():
- operations_stub = mock.Mock(spec=["GetOperation"])
- expected_result = make_operation_proto(done=True)
- retry = mock.Mock()
- retry.return_value.return_value = expected_result
-
- result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME, retry=retry)
-
- assert result == expected_result
- expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
- operations_stub.GetOperation.assert_not_called()
- retry.assert_called_once_with(operations_stub.GetOperation)
- retry.return_value.assert_called_once_with(expected_request)
-
-
-def test__cancel_grpc():
- operations_stub = mock.Mock(spec=["CancelOperation"])
-
- operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME)
-
- expected_request = operations_pb2.CancelOperationRequest(name=TEST_OPERATION_NAME)
- operations_stub.CancelOperation.assert_called_once_with(expected_request)
-
-
-def test_from_grpc():
- operation_proto = make_operation_proto(done=True)
- operations_stub = mock.sentinel.operations_stub
-
- future = operation.from_grpc(
- operation_proto,
- operations_stub,
- struct_pb2.Struct,
- metadata_type=struct_pb2.Struct,
- grpc_metadata=[("x-goog-request-params", "foo")],
- )
-
- assert future._result_type == struct_pb2.Struct
- assert future._metadata_type == struct_pb2.Struct
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.done
- assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
- assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
-
-
-def test_from_gapic():
- operation_proto = make_operation_proto(done=True)
- operations_client = mock.create_autospec(
- operations_v1.OperationsClient, instance=True
- )
-
- future = operation.from_gapic(
- operation_proto,
- operations_client,
- struct_pb2.Struct,
- metadata_type=struct_pb2.Struct,
- grpc_metadata=[("x-goog-request-params", "foo")],
- )
-
- assert future._result_type == struct_pb2.Struct
- assert future._metadata_type == struct_pb2.Struct
- assert future.operation.name == TEST_OPERATION_NAME
- assert future.done
- assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
- assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
-
-
-def test_deserialize():
- op = make_operation_proto(name="foobarbaz")
- serialized = op.SerializeToString()
- deserialized_op = operation.Operation.deserialize(serialized)
- assert op.name == deserialized_op.name
- assert type(op) is type(deserialized_op)
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
deleted file mode 100644
index 8100a49..0000000
--- a/tests/unit/test_packaging.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-
-
-def test_namespace_package_compat(tmp_path):
- # The ``google`` namespace package should not be masked
- # by the presence of ``google-api-core``.
- google = tmp_path / "google"
- google.mkdir()
- google.joinpath("othermod.py").write_text("")
- env = dict(os.environ, PYTHONPATH=str(tmp_path))
- cmd = [sys.executable, "-m", "google.othermod"]
- subprocess.check_call(cmd, env=env)
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
deleted file mode 100644
index ba0fbba..0000000
--- a/tests/unit/test_page_iterator.py
+++ /dev/null
@@ -1,665 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import types
-from unittest import mock
-
-import pytest
-
-from google.api_core import page_iterator
-
-
-def test__do_nothing_page_start():
- assert page_iterator._do_nothing_page_start(None, None, None) is None
-
-
-class TestPage(object):
- def test_constructor(self):
- parent = mock.sentinel.parent
- item_to_value = mock.sentinel.item_to_value
-
- page = page_iterator.Page(parent, (1, 2, 3), item_to_value)
-
- assert page.num_items == 3
- assert page.remaining == 3
- assert page._parent is parent
- assert page._item_to_value is item_to_value
- assert page.raw_page is None
-
- def test___iter__(self):
- page = page_iterator.Page(None, (), None, None)
- assert iter(page) is page
-
- def test_iterator_calls_parent_item_to_value(self):
- parent = mock.sentinel.parent
-
- item_to_value = mock.Mock(
- side_effect=lambda iterator, value: value, spec=["__call__"]
- )
-
- page = page_iterator.Page(parent, (10, 11, 12), item_to_value)
- page._remaining = 100
-
- assert item_to_value.call_count == 0
- assert page.remaining == 100
-
- assert next(page) == 10
- assert item_to_value.call_count == 1
- item_to_value.assert_called_with(parent, 10)
- assert page.remaining == 99
-
- assert next(page) == 11
- assert item_to_value.call_count == 2
- item_to_value.assert_called_with(parent, 11)
- assert page.remaining == 98
-
- assert next(page) == 12
- assert item_to_value.call_count == 3
- item_to_value.assert_called_with(parent, 12)
- assert page.remaining == 97
-
- def test_raw_page(self):
- parent = mock.sentinel.parent
- item_to_value = mock.sentinel.item_to_value
-
- raw_page = mock.sentinel.raw_page
-
- page = page_iterator.Page(parent, (1, 2, 3), item_to_value, raw_page=raw_page)
- assert page.raw_page is raw_page
-
- with pytest.raises(AttributeError):
- page.raw_page = None
-
-
-class PageIteratorImpl(page_iterator.Iterator):
- def _next_page(self):
- return mock.create_autospec(page_iterator.Page, instance=True)
-
-
-class TestIterator(object):
- def test_constructor(self):
- client = mock.sentinel.client
- item_to_value = mock.sentinel.item_to_value
- token = "ab13nceor03"
- max_results = 1337
-
- iterator = PageIteratorImpl(
- client, item_to_value, page_token=token, max_results=max_results
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.item_to_value == item_to_value
- assert iterator.max_results == max_results
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token == token
- assert iterator.num_results == 0
-
- def test_next(self):
- iterator = PageIteratorImpl(None, None)
- page_1 = page_iterator.Page(
- iterator, ("item 1.1", "item 1.2"), page_iterator._item_to_value_identity
- )
- page_2 = page_iterator.Page(
- iterator, ("item 2.1",), page_iterator._item_to_value_identity
- )
- iterator._next_page = mock.Mock(side_effect=[page_1, page_2, None])
-
- result = next(iterator)
- assert result == "item 1.1"
- result = next(iterator)
- assert result == "item 1.2"
- result = next(iterator)
- assert result == "item 2.1"
-
- with pytest.raises(StopIteration):
- next(iterator)
-
- def test_pages_property_starts(self):
- iterator = PageIteratorImpl(None, None)
-
- assert not iterator._started
-
- assert isinstance(iterator.pages, types.GeneratorType)
-
- assert iterator._started
-
- def test_pages_property_restart(self):
- iterator = PageIteratorImpl(None, None)
-
- assert iterator.pages
-
- # Make sure we cannot restart.
- with pytest.raises(ValueError):
- assert iterator.pages
-
- def test__page_iter_increment(self):
- iterator = PageIteratorImpl(None, None)
- page = page_iterator.Page(
- iterator, ("item",), page_iterator._item_to_value_identity
- )
- iterator._next_page = mock.Mock(side_effect=[page, None])
-
- assert iterator.num_results == 0
-
- page_iter = iterator._page_iter(increment=True)
- next(page_iter)
-
- assert iterator.num_results == 1
-
- def test__page_iter_no_increment(self):
- iterator = PageIteratorImpl(None, None)
-
- assert iterator.num_results == 0
-
- page_iter = iterator._page_iter(increment=False)
- next(page_iter)
-
- # results should still be 0 after fetching a page.
- assert iterator.num_results == 0
-
- def test__items_iter(self):
- # Items to be returned.
- item1 = 17
- item2 = 100
- item3 = 211
-
- # Make pages from mock responses
- parent = mock.sentinel.parent
- page1 = page_iterator.Page(
- parent, (item1, item2), page_iterator._item_to_value_identity
- )
- page2 = page_iterator.Page(
- parent, (item3,), page_iterator._item_to_value_identity
- )
-
- iterator = PageIteratorImpl(None, None)
- iterator._next_page = mock.Mock(side_effect=[page1, page2, None])
-
- items_iter = iterator._items_iter()
-
- assert isinstance(items_iter, types.GeneratorType)
-
- # Consume items and check the state of the iterator.
- assert iterator.num_results == 0
-
- assert next(items_iter) == item1
- assert iterator.num_results == 1
-
- assert next(items_iter) == item2
- assert iterator.num_results == 2
-
- assert next(items_iter) == item3
- assert iterator.num_results == 3
-
- with pytest.raises(StopIteration):
- next(items_iter)
-
- def test___iter__(self):
- iterator = PageIteratorImpl(None, None)
- iterator._next_page = mock.Mock(side_effect=[(1, 2), (3,), None])
-
- assert not iterator._started
-
- result = list(iterator)
-
- assert result == [1, 2, 3]
- assert iterator._started
-
- def test___iter__restart(self):
- iterator = PageIteratorImpl(None, None)
-
- iter(iterator)
-
- # Make sure we cannot restart.
- with pytest.raises(ValueError):
- iter(iterator)
-
- def test___iter___restart_after_page(self):
- iterator = PageIteratorImpl(None, None)
-
- assert iterator.pages
-
- # Make sure we cannot restart after starting the page iterator
- with pytest.raises(ValueError):
- iter(iterator)
-
-
-class TestHTTPIterator(object):
- def test_constructor(self):
- client = mock.sentinel.client
- path = "/foo"
- iterator = page_iterator.HTTPIterator(
- client, mock.sentinel.api_request, path, mock.sentinel.item_to_value
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.path == path
- assert iterator.item_to_value is mock.sentinel.item_to_value
- assert iterator._items_key == "items"
- assert iterator.max_results is None
- assert iterator.extra_params == {}
- assert iterator._page_start == page_iterator._do_nothing_page_start
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token is None
- assert iterator.num_results == 0
- assert iterator._page_size is None
-
- def test_constructor_w_extra_param_collision(self):
- extra_params = {"pageToken": "val"}
-
- with pytest.raises(ValueError):
- page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- extra_params=extra_params,
- )
-
- def test_iterate(self):
- path = "/foo"
- item1 = {"name": "1"}
- item2 = {"name": "2"}
- api_request = mock.Mock(return_value={"items": [item1, item2]})
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- api_request,
- path=path,
- item_to_value=page_iterator._item_to_value_identity,
- )
-
- assert iterator.num_results == 0
-
- items_iter = iter(iterator)
-
- val1 = next(items_iter)
- assert val1 == item1
- assert iterator.num_results == 1
-
- val2 = next(items_iter)
- assert val2 == item2
- assert iterator.num_results == 2
-
- with pytest.raises(StopIteration):
- next(items_iter)
-
- api_request.assert_called_once_with(method="GET", path=path, query_params={})
-
- def test__has_next_page_new(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
-
- # The iterator should *always* indicate that it has a next page
- # when created so that it can fetch the initial page.
- assert iterator._has_next_page()
-
- def test__has_next_page_without_token(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
-
- iterator.page_number = 1
-
- # The iterator should not indicate that it has a new page if the
- # initial page has been requested and there's no page token.
- assert not iterator._has_next_page()
-
- def test__has_next_page_w_number_w_token(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
-
- iterator.page_number = 1
- iterator.next_page_token = mock.sentinel.token
-
- # The iterator should indicate that it has a new page if the
- # initial page has been requested and there's is a page token.
- assert iterator._has_next_page()
-
- def test__has_next_page_w_max_results_not_done(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- max_results=3,
- page_token=mock.sentinel.token,
- )
-
- iterator.page_number = 1
-
- # The iterator should indicate that it has a new page if there
- # is a page token and it has not consumed more than max_results.
- assert iterator.num_results < iterator.max_results
- assert iterator._has_next_page()
-
- def test__has_next_page_w_max_results_done(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- max_results=3,
- page_token=mock.sentinel.token,
- )
-
- iterator.page_number = 1
- iterator.num_results = 3
-
- # The iterator should not indicate that it has a new page if there
- # if it has consumed more than max_results.
- assert iterator.num_results == iterator.max_results
- assert not iterator._has_next_page()
-
- def test__get_query_params_no_token(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
-
- assert iterator._get_query_params() == {}
-
- def test__get_query_params_w_token(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
- iterator.next_page_token = "token"
-
- assert iterator._get_query_params() == {"pageToken": iterator.next_page_token}
-
- def test__get_query_params_w_max_results(self):
- max_results = 3
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- max_results=max_results,
- )
-
- iterator.num_results = 1
- local_max = max_results - iterator.num_results
-
- assert iterator._get_query_params() == {"maxResults": local_max}
-
- def test__get_query_params_extra_params(self):
- extra_params = {"key": "val"}
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- extra_params=extra_params,
- )
-
- assert iterator._get_query_params() == extra_params
-
- def test__get_next_page_response_with_post(self):
- path = "/foo"
- page_response = {"items": ["one", "two"]}
- api_request = mock.Mock(return_value=page_response)
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- api_request,
- path=path,
- item_to_value=page_iterator._item_to_value_identity,
- )
- iterator._HTTP_METHOD = "POST"
-
- response = iterator._get_next_page_response()
-
- assert response == page_response
-
- api_request.assert_called_once_with(method="POST", path=path, data={})
-
- def test__get_next_page_bad_http_method(self):
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- mock.sentinel.api_request,
- mock.sentinel.path,
- mock.sentinel.item_to_value,
- )
- iterator._HTTP_METHOD = "NOT-A-VERB"
-
- with pytest.raises(ValueError):
- iterator._get_next_page_response()
-
- @pytest.mark.parametrize(
- "page_size,max_results,pages",
- [(3, None, False), (3, 8, False), (3, None, True), (3, 8, True)],
- )
- def test_page_size_items(self, page_size, max_results, pages):
- path = "/foo"
- NITEMS = 10
-
- n = [0] # blast you python 2!
-
- def api_request(*args, **kw):
- assert not args
- query_params = dict(
- maxResults=(
- page_size
- if max_results is None
- else min(page_size, max_results - n[0])
- )
- )
- if n[0]:
- query_params.update(pageToken="test")
- assert kw == {"method": "GET", "path": "/foo", "query_params": query_params}
- n_items = min(kw["query_params"]["maxResults"], NITEMS - n[0])
- items = [dict(name=str(i + n[0])) for i in range(n_items)]
- n[0] += n_items
- result = dict(items=items)
- if n[0] < NITEMS:
- result.update(nextPageToken="test")
- return result
-
- iterator = page_iterator.HTTPIterator(
- mock.sentinel.client,
- api_request,
- path=path,
- item_to_value=page_iterator._item_to_value_identity,
- page_size=page_size,
- max_results=max_results,
- )
-
- assert iterator.num_results == 0
-
- n_results = max_results if max_results is not None else NITEMS
- if pages:
- items_iter = iter(iterator.pages)
- npages = int(math.ceil(float(n_results) / page_size))
- for ipage in range(npages):
- assert list(next(items_iter)) == [
- dict(name=str(i))
- for i in range(
- ipage * page_size,
- min((ipage + 1) * page_size, n_results),
- )
- ]
- else:
- items_iter = iter(iterator)
- for i in range(n_results):
- assert next(items_iter) == dict(name=str(i))
- assert iterator.num_results == i + 1
-
- with pytest.raises(StopIteration):
- next(items_iter)
-
-
-class TestGRPCIterator(object):
- def test_constructor(self):
- client = mock.sentinel.client
- items_field = "items"
- iterator = page_iterator.GRPCIterator(
- client, mock.sentinel.method, mock.sentinel.request, items_field
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.max_results is None
- assert iterator.item_to_value is page_iterator._item_to_value_identity
- assert iterator._method == mock.sentinel.method
- assert iterator._request == mock.sentinel.request
- assert iterator._items_field == items_field
- assert (
- iterator._request_token_field
- == page_iterator.GRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD
- )
- assert (
- iterator._response_token_field
- == page_iterator.GRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD
- )
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token is None
- assert iterator.num_results == 0
-
- def test_constructor_options(self):
- client = mock.sentinel.client
- items_field = "items"
- request_field = "request"
- response_field = "response"
- iterator = page_iterator.GRPCIterator(
- client,
- mock.sentinel.method,
- mock.sentinel.request,
- items_field,
- item_to_value=mock.sentinel.item_to_value,
- request_token_field=request_field,
- response_token_field=response_field,
- max_results=42,
- )
-
- assert iterator.client is client
- assert iterator.max_results == 42
- assert iterator.item_to_value is mock.sentinel.item_to_value
- assert iterator._method == mock.sentinel.method
- assert iterator._request == mock.sentinel.request
- assert iterator._items_field == items_field
- assert iterator._request_token_field == request_field
- assert iterator._response_token_field == response_field
-
- def test_iterate(self):
- request = mock.Mock(spec=["page_token"], page_token=None)
- response1 = mock.Mock(items=["a", "b"], next_page_token="1")
- response2 = mock.Mock(items=["c"], next_page_token="2")
- response3 = mock.Mock(items=["d"], next_page_token="")
- method = mock.Mock(side_effect=[response1, response2, response3])
- iterator = page_iterator.GRPCIterator(
- mock.sentinel.client, method, request, "items"
- )
-
- assert iterator.num_results == 0
-
- items = list(iterator)
- assert items == ["a", "b", "c", "d"]
-
- method.assert_called_with(request)
- assert method.call_count == 3
- assert request.page_token == "2"
-
- def test_iterate_with_max_results(self):
- request = mock.Mock(spec=["page_token"], page_token=None)
- response1 = mock.Mock(items=["a", "b"], next_page_token="1")
- response2 = mock.Mock(items=["c"], next_page_token="2")
- response3 = mock.Mock(items=["d"], next_page_token="")
- method = mock.Mock(side_effect=[response1, response2, response3])
- iterator = page_iterator.GRPCIterator(
- mock.sentinel.client, method, request, "items", max_results=3
- )
-
- assert iterator.num_results == 0
-
- items = list(iterator)
-
- assert items == ["a", "b", "c"]
- assert iterator.num_results == 3
-
- method.assert_called_with(request)
- assert method.call_count == 2
- assert request.page_token == "1"
-
-
-class GAXPageIterator(object):
- """Fake object that matches gax.PageIterator"""
-
- def __init__(self, pages, page_token=None):
- self._pages = iter(pages)
- self.page_token = page_token
-
- def next(self):
- return next(self._pages)
-
- __next__ = next
-
-
-class TestGAXIterator(object):
- def test_constructor(self):
- client = mock.sentinel.client
- token = "zzzyy78kl"
- page_iter = GAXPageIterator((), page_token=token)
- item_to_value = page_iterator._item_to_value_identity
- max_results = 1337
- iterator = page_iterator._GAXIterator(
- client, page_iter, item_to_value, max_results=max_results
- )
-
- assert not iterator._started
- assert iterator.client is client
- assert iterator.item_to_value is item_to_value
- assert iterator.max_results == max_results
- assert iterator._gax_page_iter is page_iter
- # Changing attributes.
- assert iterator.page_number == 0
- assert iterator.next_page_token == token
- assert iterator.num_results == 0
-
- def test__next_page(self):
- page_items = (29, 31)
- page_token = "2sde98ds2s0hh"
- page_iter = GAXPageIterator([page_items], page_token=page_token)
- iterator = page_iterator._GAXIterator(
- mock.sentinel.client, page_iter, page_iterator._item_to_value_identity
- )
-
- page = iterator._next_page()
-
- assert iterator.next_page_token == page_token
- assert isinstance(page, page_iterator.Page)
- assert list(page) == list(page_items)
-
- next_page = iterator._next_page()
-
- assert next_page is None
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
deleted file mode 100644
index c34dd0f..0000000
--- a/tests/unit/test_path_template.py
+++ /dev/null
@@ -1,652 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import unicode_literals
-from unittest import mock
-
-import pytest
-
-from google.api import auth_pb2
-from google.api_core import path_template
-
-
-@pytest.mark.parametrize(
- "tmpl, args, kwargs, expected_result",
- [
- # Basic positional params
- ["/v1/*", ["a"], {}, "/v1/a"],
- ["/v1/**", ["a/b"], {}, "/v1/a/b"],
- ["/v1/*/*", ["a", "b"], {}, "/v1/a/b"],
- ["/v1/*/*/**", ["a", "b", "c/d"], {}, "/v1/a/b/c/d"],
- # Basic named params
- ["/v1/{name}", [], {"name": "parent"}, "/v1/parent"],
- ["/v1/{name=**}", [], {"name": "parent/child"}, "/v1/parent/child"],
- # Named params with a sub-template
- ["/v1/{name=parent/*}", [], {"name": "parent/child"}, "/v1/parent/child"],
- [
- "/v1/{name=parent/**}",
- [],
- {"name": "parent/child/object"},
- "/v1/parent/child/object",
- ],
- # Combining positional and named params
- ["/v1/*/{name}", ["a"], {"name": "parent"}, "/v1/a/parent"],
- ["/v1/{name}/*", ["a"], {"name": "parent"}, "/v1/parent/a"],
- [
- "/v1/{parent}/*/{child}/*",
- ["a", "b"],
- {"parent": "thor", "child": "thorson"},
- "/v1/thor/a/thorson/b",
- ],
- ["/v1/{name}/**", ["a/b"], {"name": "parent"}, "/v1/parent/a/b"],
- # Combining positional and named params with sub-templates.
- [
- "/v1/{name=parent/*}/*",
- ["a"],
- {"name": "parent/child"},
- "/v1/parent/child/a",
- ],
- [
- "/v1/*/{name=parent/**}",
- ["a"],
- {"name": "parent/child/object"},
- "/v1/a/parent/child/object",
- ],
- ],
-)
-def test_expand_success(tmpl, args, kwargs, expected_result):
- result = path_template.expand(tmpl, *args, **kwargs)
- assert result == expected_result
- assert path_template.validate(tmpl, result)
-
-
-@pytest.mark.parametrize(
- "tmpl, args, kwargs, exc_match",
- [
- # Missing positional arg.
- ["v1/*", [], {}, "Positional"],
- # Missing named arg.
- ["v1/{name}", [], {}, "Named"],
- ],
-)
-def test_expanded_failure(tmpl, args, kwargs, exc_match):
- with pytest.raises(ValueError, match=exc_match):
- path_template.expand(tmpl, *args, **kwargs)
-
-
-@pytest.mark.parametrize(
- "request_obj, field, expected_result",
- [
- [{"field": "stringValue"}, "field", "stringValue"],
- [{"field": "stringValue"}, "nosuchfield", None],
- [{"field": "stringValue"}, "field.subfield", None],
- [{"field": {"subfield": "stringValue"}}, "field", None],
- [{"field": {"subfield": "stringValue"}}, "field.subfield", "stringValue"],
- [{"field": {"subfield": [1, 2, 3]}}, "field.subfield", [1, 2, 3]],
- [{"field": {"subfield": "stringValue"}}, "field", None],
- [{"field": {"subfield": "stringValue"}}, "field.nosuchfield", None],
- [
- {"field": {"subfield": {"subsubfield": "stringValue"}}},
- "field.subfield.subsubfield",
- "stringValue",
- ],
- ["string", "field", None],
- ],
-)
-def test_get_field(request_obj, field, expected_result):
- result = path_template.get_field(request_obj, field)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "request_obj, field, expected_result",
- [
- [{"field": "stringValue"}, "field", {}],
- [{"field": "stringValue"}, "nosuchfield", {"field": "stringValue"}],
- [{"field": "stringValue"}, "field.subfield", {"field": "stringValue"}],
- [{"field": {"subfield": "stringValue"}}, "field.subfield", {"field": {}}],
- [
- {"field": {"subfield": "stringValue", "q": "w"}, "e": "f"},
- "field.subfield",
- {"field": {"q": "w"}, "e": "f"},
- ],
- [
- {"field": {"subfield": "stringValue"}},
- "field.nosuchfield",
- {"field": {"subfield": "stringValue"}},
- ],
- [
- {"field": {"subfield": {"subsubfield": "stringValue", "q": "w"}}},
- "field.subfield.subsubfield",
- {"field": {"subfield": {"q": "w"}}},
- ],
- ["string", "field", "string"],
- ["string", "field.subfield", "string"],
- ],
-)
-def test_delete_field(request_obj, field, expected_result):
- path_template.delete_field(request_obj, field)
- assert request_obj == expected_result
-
-
-@pytest.mark.parametrize(
- "tmpl, path",
- [
- # Single segment template, but multi segment value
- ["v1/*", "v1/a/b"],
- ["v1/*/*", "v1/a/b/c"],
- # Single segement named template, but multi segment value
- ["v1/{name}", "v1/a/b"],
- ["v1/{name}/{value}", "v1/a/b/c"],
- # Named value with a sub-template but invalid value
- ["v1/{name=parent/*}", "v1/grandparent/child"],
- ],
-)
-def test_validate_failure(tmpl, path):
- assert not path_template.validate(tmpl, path)
-
-
-def test__expand_variable_match_unexpected():
- match = mock.Mock(spec=["group"])
- match.group.return_value = None
- with pytest.raises(ValueError, match="Unknown"):
- path_template._expand_variable_match([], {}, match)
-
-
-def test__replace_variable_with_pattern():
- match = mock.Mock(spec=["group"])
- match.group.return_value = None
- with pytest.raises(ValueError, match="Unknown"):
- path_template._replace_variable_with_pattern(match)
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs, expected_result",
- [
- [
- [["get", "/v1/no/template", ""]],
- None,
- {"foo": "bar"},
- ["get", "/v1/no/template", {}, {"foo": "bar"}],
- ],
- [
- [["get", "/v1/no/template", ""]],
- auth_pb2.AuthenticationRule(selector="bar"),
- {},
- [
- "get",
- "/v1/no/template",
- None,
- auth_pb2.AuthenticationRule(selector="bar"),
- ],
- ],
- # Single templates
- [
- [["get", "/v1/{field}", ""]],
- None,
- {"field": "parent"},
- ["get", "/v1/parent", {}, {}],
- ],
- [
- [["get", "/v1/{selector}", ""]],
- auth_pb2.AuthenticationRule(selector="parent"),
- {},
- ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
- ],
- [
- [["get", "/v1/{field.sub}", ""]],
- None,
- {"field": {"sub": "parent"}, "foo": "bar"},
- ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
- ],
- [
- [["get", "/v1/{oauth.canonical_scopes}", ""]],
- auth_pb2.AuthenticationRule(
- selector="bar",
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
- ),
- {},
- [
- "get",
- "/v1/parent",
- None,
- auth_pb2.AuthenticationRule(
- selector="bar", oauth=auth_pb2.OAuthRequirements()
- ),
- ],
- ],
- ],
-)
-def test_transcode_base_case(http_options, message, request_kwargs, expected_result):
- http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, message, **request_kwargs)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs, expected_result",
- [
- [
- [["get", "/v1/{field.subfield}", ""]],
- None,
- {"field": {"subfield": "parent"}, "foo": "bar"},
- ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
- ],
- [
- [["get", "/v1/{oauth.canonical_scopes}", ""]],
- auth_pb2.AuthenticationRule(
- selector="bar",
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
- ),
- {},
- [
- "get",
- "/v1/parent",
- None,
- auth_pb2.AuthenticationRule(
- selector="bar", oauth=auth_pb2.OAuthRequirements()
- ),
- ],
- ],
- [
- [["get", "/v1/{field.subfield.subsubfield}", ""]],
- None,
- {"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"},
- ["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}],
- ],
- [
- [["get", "/v1/{field.subfield1}/{field.subfield2}", ""]],
- None,
- {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
- ["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}],
- ],
- [
- [["get", "/v1/{selector}/{oauth.canonical_scopes}", ""]],
- auth_pb2.AuthenticationRule(
- selector="parent",
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
- ),
- {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
- [
- "get",
- "/v1/parent/child",
- None,
- auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
- ],
- ],
- ],
-)
-def test_transcode_subfields(http_options, message, request_kwargs, expected_result):
- http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, message, **request_kwargs)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs, expected_result",
- [
- # Single segment wildcard
- [
- [["get", "/v1/{field=*}", ""]],
- None,
- {"field": "parent"},
- ["get", "/v1/parent", {}, {}],
- ],
- [
- [["get", "/v1/{selector=*}", ""]],
- auth_pb2.AuthenticationRule(selector="parent"),
- {},
- ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
- ],
- [
- [["get", "/v1/{field=a/*/b/*}", ""]],
- None,
- {"field": "a/parent/b/child", "foo": "bar"},
- ["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}],
- ],
- [
- [["get", "/v1/{selector=a/*/b/*}", ""]],
- auth_pb2.AuthenticationRule(
- selector="a/parent/b/child", allow_without_credential=True
- ),
- {},
- [
- "get",
- "/v1/a/parent/b/child",
- None,
- auth_pb2.AuthenticationRule(allow_without_credential=True),
- ],
- ],
- # Double segment wildcard
- [
- [["get", "/v1/{field=**}", ""]],
- None,
- {"field": "parent/p1"},
- ["get", "/v1/parent/p1", {}, {}],
- ],
- [
- [["get", "/v1/{selector=**}", ""]],
- auth_pb2.AuthenticationRule(selector="parent/p1"),
- {},
- ["get", "/v1/parent/p1", None, auth_pb2.AuthenticationRule()],
- ],
- [
- [["get", "/v1/{field=a/**/b/**}", ""]],
- None,
- {"field": "a/parent/p1/b/child/c1", "foo": "bar"},
- ["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}],
- ],
- [
- [["get", "/v1/{selector=a/**/b/**}", ""]],
- auth_pb2.AuthenticationRule(
- selector="a/parent/p1/b/child/c1", allow_without_credential=True
- ),
- {},
- [
- "get",
- "/v1/a/parent/p1/b/child/c1",
- None,
- auth_pb2.AuthenticationRule(allow_without_credential=True),
- ],
- ],
- # Combined single and double segment wildcard
- [
- [["get", "/v1/{field=a/*/b/**}", ""]],
- None,
- {"field": "a/parent/b/child/c1"},
- ["get", "/v1/a/parent/b/child/c1", {}, {}],
- ],
- [
- [["get", "/v1/{selector=a/*/b/**}", ""]],
- auth_pb2.AuthenticationRule(selector="a/parent/b/child/c1"),
- {},
- ["get", "/v1/a/parent/b/child/c1", None, auth_pb2.AuthenticationRule()],
- ],
- [
- [["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]],
- None,
- {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
- ["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}],
- ],
- [
- [["get", "/v1/{selector=a/**/b/*}/v2/{oauth.canonical_scopes}", ""]],
- auth_pb2.AuthenticationRule(
- selector="a/parent/p1/b/child",
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="first"),
- ),
- {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
- [
- "get",
- "/v1/a/parent/p1/b/child/v2/first",
- None,
- auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
- ],
- ],
- ],
-)
-def test_transcode_with_wildcard(
- http_options, message, request_kwargs, expected_result
-):
- http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, message, **request_kwargs)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs, expected_result",
- [
- # Single field body
- [
- [["post", "/v1/no/template", "data"]],
- None,
- {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
- ["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}],
- ],
- [
- [["post", "/v1/no/template", "oauth"]],
- auth_pb2.AuthenticationRule(
- selector="bar",
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
- ),
- {},
- [
- "post",
- "/v1/no/template",
- auth_pb2.OAuthRequirements(canonical_scopes="child"),
- auth_pb2.AuthenticationRule(selector="bar"),
- ],
- ],
- [
- [["post", "/v1/{field=a/*}/b/{name=**}", "data"]],
- None,
- {
- "field": "a/parent",
- "name": "first/last",
- "data": {"id": 1, "info": "some info"},
- "foo": "bar",
- },
- [
- "post",
- "/v1/a/parent/b/first/last",
- {"id": 1, "info": "some info"},
- {"foo": "bar"},
- ],
- ],
- [
- [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "oauth"]],
- auth_pb2.AuthenticationRule(
- selector="a/parent",
- allow_without_credential=True,
- requirements=[auth_pb2.AuthRequirement(provider_id="p")],
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
- ),
- {},
- [
- "post",
- "/v1/a/parent/b/first/last",
- auth_pb2.OAuthRequirements(),
- auth_pb2.AuthenticationRule(
- requirements=[auth_pb2.AuthRequirement(provider_id="p")],
- allow_without_credential=True,
- ),
- ],
- ],
- # Wildcard body
- [
- [["post", "/v1/{field=a/*}/b/{name=**}", "*"]],
- None,
- {
- "field": "a/parent",
- "name": "first/last",
- "data": {"id": 1, "info": "some info"},
- "foo": "bar",
- },
- [
- "post",
- "/v1/a/parent/b/first/last",
- {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
- {},
- ],
- ],
- [
- [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"]],
- auth_pb2.AuthenticationRule(
- selector="a/parent",
- allow_without_credential=True,
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
- ),
- {
- "field": "a/parent",
- "name": "first/last",
- "data": {"id": 1, "info": "some info"},
- "foo": "bar",
- },
- [
- "post",
- "/v1/a/parent/b/first/last",
- auth_pb2.AuthenticationRule(
- allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
- ),
- auth_pb2.AuthenticationRule(),
- ],
- ],
- ],
-)
-def test_transcode_with_body(http_options, message, request_kwargs, expected_result):
- http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, message, **request_kwargs)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs, expected_result",
- [
- # Additional bindings
- [
- [
- ["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"],
- ["post", "/v1/{field=a/*}/b/{name=**}", "*"],
- ],
- None,
- {
- "field": "a/parent",
- "name": "first/last",
- "data": {"id": 1, "info": "some info"},
- "foo": "bar",
- },
- [
- "post",
- "/v1/a/parent/b/first/last",
- {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
- {},
- ],
- ],
- [
- [
- [
- "post",
- "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}",
- "extra_data",
- ],
- ["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"],
- ],
- auth_pb2.AuthenticationRule(
- selector="a/parent",
- allow_without_credential=True,
- oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
- ),
- {},
- [
- "post",
- "/v1/a/parent/b/first/last",
- auth_pb2.AuthenticationRule(
- allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
- ),
- auth_pb2.AuthenticationRule(),
- ],
- ],
- [
- [
- ["get", "/v1/{field=a/*}/b/{name=**}", ""],
- ["get", "/v1/{field=a/*}/b/first/last", ""],
- ],
- None,
- {"field": "a/parent", "foo": "bar"},
- ["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}],
- ],
- [
- [
- ["get", "/v1/{selector=a/*}/b/{oauth.allow_without_credential=**}", ""],
- ["get", "/v1/{selector=a/*}/b/first/last", ""],
- ],
- auth_pb2.AuthenticationRule(
- selector="a/parent",
- allow_without_credential=True,
- oauth=auth_pb2.OAuthRequirements(),
- ),
- {},
- [
- "get",
- "/v1/a/parent/b/first/last",
- None,
- auth_pb2.AuthenticationRule(
- allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
- ),
- ],
- ],
- ],
-)
-def test_transcode_with_additional_bindings(
- http_options, message, request_kwargs, expected_result
-):
- http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, message, **request_kwargs)
- assert result == expected_result
-
-
-@pytest.mark.parametrize(
- "http_options, message, request_kwargs",
- [
- [[["get", "/v1/{name}", ""]], None, {"foo": "bar"}],
- [[["get", "/v1/{selector}", ""]], auth_pb2.AuthenticationRule(), {}],
- [[["get", "/v1/{name}", ""]], auth_pb2.AuthenticationRule(), {}],
- [[["get", "/v1/{name}", ""]], None, {"name": "first/last"}],
- [
- [["get", "/v1/{selector}", ""]],
- auth_pb2.AuthenticationRule(selector="first/last"),
- {},
- ],
- [[["get", "/v1/{name=mr/*/*}", ""]], None, {"name": "first/last"}],
- [
- [["get", "/v1/{selector=mr/*/*}", ""]],
- auth_pb2.AuthenticationRule(selector="first/last"),
- {},
- ],
- [[["post", "/v1/{name}", "data"]], None, {"name": "first/last"}],
- [
- [["post", "/v1/{selector}", "data"]],
- auth_pb2.AuthenticationRule(selector="first"),
- {},
- ],
- [[["post", "/v1/{first_name}", "data"]], None, {"last_name": "last"}],
- [
- [["post", "/v1/{first_name}", ""]],
- auth_pb2.AuthenticationRule(selector="first"),
- {},
- ],
- ],
-)
-def test_transcode_fails(http_options, message, request_kwargs):
- http_options, _ = helper_test_transcode(http_options, range(4))
- with pytest.raises(ValueError) as exc_info:
- path_template.transcode(http_options, message, **request_kwargs)
- assert str(exc_info.value).count("URI") == len(http_options)
-
-
-def helper_test_transcode(http_options_list, expected_result_list):
- http_options = []
- for opt_list in http_options_list:
- http_option = {"method": opt_list[0], "uri": opt_list[1]}
- if opt_list[2]:
- http_option["body"] = opt_list[2]
- http_options.append(http_option)
-
- expected_result = {
- "method": expected_result_list[0],
- "uri": expected_result_list[1],
- "query_params": expected_result_list[3],
- }
- if expected_result_list[2]:
- expected_result["body"] = expected_result_list[2]
- return (http_options, expected_result)
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
deleted file mode 100644
index 5678d3b..0000000
--- a/tests/unit/test_protobuf_helpers.py
+++ /dev/null
@@ -1,512 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-import re
-
-from google.api import http_pb2
-from google.api_core import protobuf_helpers
-from google.longrunning import operations_pb2
-from google.protobuf import any_pb2
-from google.protobuf import message
-from google.protobuf import source_context_pb2
-from google.protobuf import struct_pb2
-from google.protobuf import timestamp_pb2
-from google.protobuf import type_pb2
-from google.protobuf import wrappers_pb2
-from google.type import color_pb2
-from google.type import date_pb2
-from google.type import timeofday_pb2
-
-
-def test_from_any_pb_success():
- in_message = date_pb2.Date(year=1990)
- in_message_any = any_pb2.Any()
- in_message_any.Pack(in_message)
- out_message = protobuf_helpers.from_any_pb(date_pb2.Date, in_message_any)
-
- assert in_message == out_message
-
-
-def test_from_any_pb_wrapped_success():
- # Declare a message class conforming to wrapped messages.
- class WrappedDate(object):
- def __init__(self, **kwargs):
- self._pb = date_pb2.Date(**kwargs)
-
- def __eq__(self, other):
- return self._pb == other
-
- @classmethod
- def pb(cls, msg):
- return msg._pb
-
- # Run the same test as `test_from_any_pb_success`, but using the
- # wrapped class.
- in_message = date_pb2.Date(year=1990)
- in_message_any = any_pb2.Any()
- in_message_any.Pack(in_message)
- out_message = protobuf_helpers.from_any_pb(WrappedDate, in_message_any)
-
- assert out_message == in_message
-
-
-def test_from_any_pb_failure():
- in_message = any_pb2.Any()
- in_message.Pack(date_pb2.Date(year=1990))
-
- with pytest.raises(
- TypeError,
- match=re.escape(
- "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`"
- ),
- ):
- protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message)
-
-
-def test_check_protobuf_helpers_ok():
- assert protobuf_helpers.check_oneof() is None
- assert protobuf_helpers.check_oneof(foo="bar") is None
- assert protobuf_helpers.check_oneof(foo="bar", baz=None) is None
- assert protobuf_helpers.check_oneof(foo=None, baz="bacon") is None
- assert protobuf_helpers.check_oneof(foo="bar", spam=None, eggs=None) is None
-
-
-def test_check_protobuf_helpers_failures():
- with pytest.raises(ValueError):
- protobuf_helpers.check_oneof(foo="bar", spam="eggs")
- with pytest.raises(ValueError):
- protobuf_helpers.check_oneof(foo="bar", baz="bacon", spam="eggs")
- with pytest.raises(ValueError):
- protobuf_helpers.check_oneof(foo="bar", spam=0, eggs=None)
-
-
-def test_get_messages():
- answer = protobuf_helpers.get_messages(date_pb2)
-
- # Ensure that Date was exported properly.
- assert answer["Date"] is date_pb2.Date
-
- # Ensure that no non-Message objects were exported.
- for value in answer.values():
- assert issubclass(value, message.Message)
-
-
-def test_get_dict_absent():
- with pytest.raises(KeyError):
- assert protobuf_helpers.get({}, "foo")
-
-
-def test_get_dict_present():
- assert protobuf_helpers.get({"foo": "bar"}, "foo") == "bar"
-
-
-def test_get_dict_default():
- assert protobuf_helpers.get({}, "foo", default="bar") == "bar"
-
-
-def test_get_dict_nested():
- assert protobuf_helpers.get({"foo": {"bar": "baz"}}, "foo.bar") == "baz"
-
-
-def test_get_dict_nested_default():
- assert protobuf_helpers.get({}, "foo.baz", default="bacon") == "bacon"
- assert protobuf_helpers.get({"foo": {}}, "foo.baz", default="bacon") == "bacon"
-
-
-def test_get_msg_sentinel():
- msg = timestamp_pb2.Timestamp()
- with pytest.raises(KeyError):
- assert protobuf_helpers.get(msg, "foo")
-
-
-def test_get_msg_present():
- msg = timestamp_pb2.Timestamp(seconds=42)
- assert protobuf_helpers.get(msg, "seconds") == 42
-
-
-def test_get_msg_default():
- msg = timestamp_pb2.Timestamp()
- assert protobuf_helpers.get(msg, "foo", default="bar") == "bar"
-
-
-def test_invalid_object():
- with pytest.raises(TypeError):
- protobuf_helpers.get(object(), "foo", "bar")
-
-
-def test_set_dict():
- mapping = {}
- protobuf_helpers.set(mapping, "foo", "bar")
- assert mapping == {"foo": "bar"}
-
-
-def test_set_msg():
- msg = timestamp_pb2.Timestamp()
- protobuf_helpers.set(msg, "seconds", 42)
- assert msg.seconds == 42
-
-
-def test_set_dict_nested():
- mapping = {}
- protobuf_helpers.set(mapping, "foo.bar", "baz")
- assert mapping == {"foo": {"bar": "baz"}}
-
-
-def test_set_invalid_object():
- with pytest.raises(TypeError):
- protobuf_helpers.set(object(), "foo", "bar")
-
-
-def test_set_list():
- list_ops_response = operations_pb2.ListOperationsResponse()
-
- protobuf_helpers.set(
- list_ops_response,
- "operations",
- [{"name": "foo"}, operations_pb2.Operation(name="bar")],
- )
-
- assert len(list_ops_response.operations) == 2
-
- for operation in list_ops_response.operations:
- assert isinstance(operation, operations_pb2.Operation)
-
- assert list_ops_response.operations[0].name == "foo"
- assert list_ops_response.operations[1].name == "bar"
-
-
-def test_set_list_clear_existing():
- list_ops_response = operations_pb2.ListOperationsResponse(
- operations=[{"name": "baz"}]
- )
-
- protobuf_helpers.set(
- list_ops_response,
- "operations",
- [{"name": "foo"}, operations_pb2.Operation(name="bar")],
- )
-
- assert len(list_ops_response.operations) == 2
- for operation in list_ops_response.operations:
- assert isinstance(operation, operations_pb2.Operation)
- assert list_ops_response.operations[0].name == "foo"
- assert list_ops_response.operations[1].name == "bar"
-
-
-def test_set_msg_with_msg_field():
- rule = http_pb2.HttpRule()
- pattern = http_pb2.CustomHttpPattern(kind="foo", path="bar")
-
- protobuf_helpers.set(rule, "custom", pattern)
-
- assert rule.custom.kind == "foo"
- assert rule.custom.path == "bar"
-
-
-def test_set_msg_with_dict_field():
- rule = http_pb2.HttpRule()
- pattern = {"kind": "foo", "path": "bar"}
-
- protobuf_helpers.set(rule, "custom", pattern)
-
- assert rule.custom.kind == "foo"
- assert rule.custom.path == "bar"
-
-
-def test_set_msg_nested_key():
- rule = http_pb2.HttpRule(custom=http_pb2.CustomHttpPattern(kind="foo", path="bar"))
-
- protobuf_helpers.set(rule, "custom.kind", "baz")
-
- assert rule.custom.kind == "baz"
- assert rule.custom.path == "bar"
-
-
-def test_setdefault_dict_unset():
- mapping = {}
- protobuf_helpers.setdefault(mapping, "foo", "bar")
- assert mapping == {"foo": "bar"}
-
-
-def test_setdefault_dict_falsy():
- mapping = {"foo": None}
- protobuf_helpers.setdefault(mapping, "foo", "bar")
- assert mapping == {"foo": "bar"}
-
-
-def test_setdefault_dict_truthy():
- mapping = {"foo": "bar"}
- protobuf_helpers.setdefault(mapping, "foo", "baz")
- assert mapping == {"foo": "bar"}
-
-
-def test_setdefault_pb2_falsy():
- operation = operations_pb2.Operation()
- protobuf_helpers.setdefault(operation, "name", "foo")
- assert operation.name == "foo"
-
-
-def test_setdefault_pb2_truthy():
- operation = operations_pb2.Operation(name="bar")
- protobuf_helpers.setdefault(operation, "name", "foo")
- assert operation.name == "bar"
-
-
-def test_field_mask_invalid_args():
- with pytest.raises(ValueError):
- protobuf_helpers.field_mask("foo", any_pb2.Any())
- with pytest.raises(ValueError):
- protobuf_helpers.field_mask(any_pb2.Any(), "bar")
- with pytest.raises(ValueError):
- protobuf_helpers.field_mask(any_pb2.Any(), operations_pb2.Operation())
-
-
-def test_field_mask_equal_values():
- assert protobuf_helpers.field_mask(None, None).paths == []
-
- original = struct_pb2.Value(number_value=1.0)
- modified = struct_pb2.Value(number_value=1.0)
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
- modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
- modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
-
-def test_field_mask_zero_values():
- # Singular Values
- original = color_pb2.Color(red=0.0)
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = None
- modified = color_pb2.Color(red=0.0)
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- # Repeated Values
- original = struct_pb2.ListValue(values=[])
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = None
- modified = struct_pb2.ListValue(values=[])
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- # Maps
- original = struct_pb2.Struct(fields={})
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = None
- modified = struct_pb2.Struct(fields={})
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- # Oneofs
- original = struct_pb2.Value(number_value=0.0)
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
- original = None
- modified = struct_pb2.Value(number_value=0.0)
- assert protobuf_helpers.field_mask(original, modified).paths == []
-
-
-def test_field_mask_singular_field_diffs():
- original = type_pb2.Type(name="name")
- modified = type_pb2.Type()
- assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
-
- original = type_pb2.Type(name="name")
- modified = type_pb2.Type()
- assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
-
- original = None
- modified = type_pb2.Type(name="name")
- assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
-
- original = type_pb2.Type(name="name")
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
-
-
-def test_field_mask_message_diffs():
- original = type_pb2.Type()
- modified = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="name")
- )
- assert protobuf_helpers.field_mask(original, modified).paths == [
- "source_context.file_name"
- ]
-
- original = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="name")
- )
- modified = type_pb2.Type()
- assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
-
- original = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="name")
- )
- modified = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="other_name")
- )
- assert protobuf_helpers.field_mask(original, modified).paths == [
- "source_context.file_name"
- ]
-
- original = None
- modified = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="name")
- )
- assert protobuf_helpers.field_mask(original, modified).paths == [
- "source_context.file_name"
- ]
-
- original = type_pb2.Type(
- source_context=source_context_pb2.SourceContext(file_name="name")
- )
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
-
-
-def test_field_mask_wrapper_type_diffs():
- original = color_pb2.Color()
- modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
-
- original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- modified = color_pb2.Color()
- assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
-
- original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
- assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
-
- original = None
- modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
- assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
-
- original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
-
-
-def test_field_mask_repeated_diffs():
- original = struct_pb2.ListValue()
- modified = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
- )
- assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
-
- original = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
- )
- modified = struct_pb2.ListValue()
- assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
-
- original = None
- modified = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
- )
- assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
-
- original = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
- )
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
-
- original = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
- )
- modified = struct_pb2.ListValue(
- values=[struct_pb2.Value(number_value=2.0), struct_pb2.Value(number_value=1.0)]
- )
- assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
-
-
-def test_field_mask_map_diffs():
- original = struct_pb2.Struct()
- modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
- original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- modified = struct_pb2.Struct()
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
- original = None
- modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
- original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- modified = None
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
- original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=2.0)})
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
- original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
- modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
- assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
-
-
-def test_field_mask_different_level_diffs():
- original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
- modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0), red=1.0)
- assert sorted(protobuf_helpers.field_mask(original, modified).paths) == [
- "alpha",
- "red",
- ]
-
-
-def test_field_mask_ignore_trailing_underscore():
- import proto
-
- class Foo(proto.Message):
- type_ = proto.Field(proto.STRING, number=1)
- input_config = proto.Field(proto.STRING, number=2)
-
- modified = Foo(type_="bar", input_config="baz")
-
- assert sorted(protobuf_helpers.field_mask(None, Foo.pb(modified)).paths) == [
- "input_config",
- "type",
- ]
-
-
-def test_field_mask_ignore_trailing_underscore_with_nesting():
- import proto
-
- class Bar(proto.Message):
- class Baz(proto.Message):
- input_config = proto.Field(proto.STRING, number=1)
-
- type_ = proto.Field(Baz, number=1)
-
- modified = Bar()
- modified.type_.input_config = "foo"
-
- assert sorted(protobuf_helpers.field_mask(None, Bar.pb(modified)).paths) == [
- "type.input_config",
- ]
diff --git a/tests/unit/test_python_package_support.py b/tests/unit/test_python_package_support.py
deleted file mode 100644
index 0feb264..0000000
--- a/tests/unit/test_python_package_support.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import warnings
-from unittest.mock import patch
-
-import pytest
-
-from google.api_core._python_package_support import (
- parse_version_to_tuple,
- get_dependency_version,
- warn_deprecation_for_versions_less_than,
- check_dependency_versions,
- DependencyConstraint,
- DependencyVersion,
-)
-
-
-@pytest.mark.parametrize("version_string_to_test", ["1.2.3", "1.2.3b1"])
-def test_get_dependency_version(mocker, version_string_to_test):
- """Test get_dependency_version."""
- mock_importlib = mocker.patch(
- "importlib.metadata.version", return_value=version_string_to_test
- )
- expected = DependencyVersion(
- parse_version_to_tuple(version_string_to_test), version_string_to_test
- )
- assert get_dependency_version("some-package") == expected
-
- mock_importlib.assert_called_once_with("some-package")
-
- # Test package not found
- mock_importlib.side_effect = ImportError
- assert get_dependency_version("not-a-package") == DependencyVersion(None, "--")
-
-
-@patch("google.api_core._python_package_support._get_distribution_and_import_packages")
-@patch("google.api_core._python_package_support.get_dependency_version")
-def test_warn_deprecation_for_versions_less_than(mock_get_version, mock_get_packages):
- """Test the deprecation warning logic."""
- # Mock the helper function to return predictable package strings
- mock_get_packages.side_effect = [
- ("dep-package (dep.package)", "dep-package"),
- ("my-package (my.package)", "my-package"),
- ]
-
- mock_get_version.return_value = DependencyVersion(
- parse_version_to_tuple("1.0.0"), "1.0.0"
- )
- with pytest.warns(FutureWarning) as record:
- warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0")
- assert len(record) == 1
- assert (
- "DEPRECATION: Package my-package (my.package) depends on dep-package (dep.package)"
- in str(record[0].message)
- )
-
- # Cases where no warning should be issued
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always") # Capture all warnings
-
- # Case 2: Installed version is equal to required, should not warn.
- mock_get_packages.reset_mock()
- mock_get_version.return_value = DependencyVersion(
- parse_version_to_tuple("2.0.0"), "2.0.0"
- )
- warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0")
-
- # Case 3: Installed version is greater than required, should not warn.
- mock_get_packages.reset_mock()
- mock_get_version.return_value = DependencyVersion(
- parse_version_to_tuple("3.0.0"), "3.0.0"
- )
- warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0")
-
- # Case 4: Dependency not found, should not warn.
- mock_get_packages.reset_mock()
- mock_get_version.return_value = DependencyVersion(None, "--")
- warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0")
-
- # Assert that no warnings were recorded
- assert len(w) == 0
-
- # Case 5: Custom message template.
- mock_get_packages.reset_mock()
- mock_get_packages.side_effect = [
- ("dep-package (dep.package)", "dep-package"),
- ("my-package (my.package)", "my-package"),
- ]
- mock_get_version.return_value = DependencyVersion(
- parse_version_to_tuple("1.0.0"), "1.0.0"
- )
- template = "Custom warning for {dependency_package} used by {consumer_package}."
- with pytest.warns(FutureWarning) as record:
- warn_deprecation_for_versions_less_than(
- "my.package", "dep.package", "2.0.0", message_template=template
- )
- assert len(record) == 1
- assert (
- "Custom warning for dep-package (dep.package) used by my-package (my.package)."
- in str(record[0].message)
- )
-
-
-@patch(
- "google.api_core._python_package_support.warn_deprecation_for_versions_less_than"
-)
-def test_check_dependency_versions_with_custom_warnings(mock_warn):
- """Test check_dependency_versions with custom warning parameters."""
- custom_warning1 = DependencyConstraint("pkg1", "1.0.0", "2.0.0")
- custom_warning2 = DependencyConstraint("pkg2", "2.0.0", "3.0.0")
-
- check_dependency_versions("my-consumer", custom_warning1, custom_warning2)
-
- assert mock_warn.call_count == 2
- mock_warn.assert_any_call(
- "my-consumer", "pkg1", "1.0.0", recommended_version="2.0.0"
- )
- mock_warn.assert_any_call(
- "my-consumer", "pkg2", "2.0.0", recommended_version="3.0.0"
- )
diff --git a/tests/unit/test_python_version_support.py b/tests/unit/test_python_version_support.py
deleted file mode 100644
index 76eb821..0000000
--- a/tests/unit/test_python_version_support.py
+++ /dev/null
@@ -1,257 +0,0 @@
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-import datetime
-import textwrap
-import warnings
-from collections import namedtuple
-
-from unittest.mock import patch
-
-# Code to be tested
-from google.api_core._python_version_support import (
- _flatten_message,
- check_python_version,
- PythonVersionStatus,
- PYTHON_VERSION_INFO,
-)
-
-# Helper object for mocking sys.version_info
-VersionInfoMock = namedtuple("VersionInfoMock", ["major", "minor"])
-
-
-def test_flatten_message():
- """Test that _flatten_message correctly dedents and flattens a string."""
- input_text = """
- This is a multi-line
- string with some
- indentation.
- """
- expected_output = "This is a multi-line string with some indentation."
- assert _flatten_message(input_text) == expected_output
-
-
-def _create_failure_message(
- expected, result, py_version, date, gapic_dep, py_eol, eol_warn, gapic_end
-):
- """Create a detailed failure message for a test."""
- return textwrap.dedent( # pragma: NO COVER
- f"""
- --- Test Failed ---
- Expected status: {expected.name}
- Received status: {result.name}
- ---------------------
- Context:
- - Mocked Python Version: {py_version}
- - Mocked Today's Date: {date}
- Calculated Dates:
- - gapic_deprecation: {gapic_dep}
- - python_eol: {py_eol}
- - eol_warning_starts: {eol_warn}
- - gapic_end: {gapic_end}
- """
- )
-
-
-def generate_tracked_version_test_cases():
- """
- Yields test parameters for all tracked versions and boundary conditions.
- """
- for version_tuple, version_info in PYTHON_VERSION_INFO.items():
- py_version_str = f"{version_tuple[0]}.{version_tuple[1]}"
- gapic_dep = version_info.gapic_deprecation or (
- version_info.python_eol - datetime.timedelta(days=365)
- )
- gapic_end = version_info.gapic_end or (
- version_info.python_eol + datetime.timedelta(weeks=1)
- )
- eol_warning_starts = version_info.python_eol + datetime.timedelta(weeks=1)
-
- test_cases = {
- "supported_before_deprecation_date": {
- "date": gapic_dep - datetime.timedelta(days=1),
- "expected": PythonVersionStatus.PYTHON_VERSION_SUPPORTED,
- },
- "deprecated_on_deprecation_date": {
- "date": gapic_dep,
- "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED,
- },
- "deprecated_on_eol_date": {
- "date": version_info.python_eol,
- "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED,
- },
- "deprecated_before_eol_warning_starts": {
- "date": eol_warning_starts - datetime.timedelta(days=1),
- "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED,
- },
- "eol_on_eol_warning_date": {
- "date": eol_warning_starts,
- "expected": PythonVersionStatus.PYTHON_VERSION_EOL,
- },
- "eol_on_gapic_end_date": {
- "date": gapic_end,
- "expected": PythonVersionStatus.PYTHON_VERSION_EOL,
- },
- "unsupported_after_gapic_end_date": {
- "date": gapic_end + datetime.timedelta(days=1),
- "expected": PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED,
- },
- }
-
- for name, params in test_cases.items():
- yield pytest.param(
- version_tuple,
- params["date"],
- params["expected"],
- gapic_dep,
- gapic_end,
- eol_warning_starts,
- id=f"{py_version_str}-{name}",
- )
-
-
-@pytest.mark.parametrize(
- "version_tuple, mock_date, expected_status, gapic_dep, gapic_end, eol_warning_starts",
- generate_tracked_version_test_cases(),
-)
-def test_all_tracked_versions_and_date_scenarios(
- version_tuple, mock_date, expected_status, gapic_dep, gapic_end, eol_warning_starts
-):
- """Test all outcomes for each tracked version using parametrization."""
- mock_py_v = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1])
-
- with patch("google.api_core._python_version_support.sys.version_info", mock_py_v):
- # Supported versions should not issue warnings
- if expected_status == PythonVersionStatus.PYTHON_VERSION_SUPPORTED:
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- result = check_python_version(today=mock_date)
- assert len(w) == 0
- # All other statuses should issue a warning
- else:
- with pytest.warns(FutureWarning) as record:
- result = check_python_version(today=mock_date)
- assert len(record) == 1
-
- if result != expected_status: # pragma: NO COVER
- py_version_str = f"{version_tuple[0]}.{version_tuple[1]}"
- version_info = PYTHON_VERSION_INFO[version_tuple]
-
- fail_msg = _create_failure_message(
- expected_status,
- result,
- py_version_str,
- mock_date,
- gapic_dep,
- version_info.python_eol,
- eol_warning_starts,
- gapic_end,
- )
- pytest.fail(fail_msg, pytrace=False)
-
-
-def test_override_gapic_end_only():
- """Test behavior when only gapic_end is manually overridden."""
- version_tuple = (3, 9)
- original_info = PYTHON_VERSION_INFO[version_tuple]
- mock_py_version = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1])
-
- custom_gapic_end = original_info.python_eol + datetime.timedelta(days=212)
- overridden_info = original_info._replace(gapic_end=custom_gapic_end)
-
- with patch(
- "google.api_core._python_version_support.sys.version_info", mock_py_version
- ):
- with patch.dict(
- "google.api_core._python_version_support.PYTHON_VERSION_INFO",
- {version_tuple: overridden_info},
- ):
- with pytest.warns(FutureWarning, match="past its end of life"):
- result_before_boundary = check_python_version(
- today=custom_gapic_end + datetime.timedelta(days=-1)
- )
- assert result_before_boundary == PythonVersionStatus.PYTHON_VERSION_EOL
-
- with pytest.warns(FutureWarning, match="past its end of life"):
- result_at_boundary = check_python_version(today=custom_gapic_end)
- assert result_at_boundary == PythonVersionStatus.PYTHON_VERSION_EOL
-
- with pytest.warns(FutureWarning, match="non-supported Python version"):
- result_after_boundary = check_python_version(
- today=custom_gapic_end + datetime.timedelta(days=1)
- )
- assert (
- result_after_boundary == PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED
- )
-
-
-def test_override_gapic_deprecation_only():
- """Test behavior when only gapic_deprecation is manually overridden."""
- version_tuple = (3, 9)
- original_info = PYTHON_VERSION_INFO[version_tuple]
- mock_py_version = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1])
-
- custom_gapic_dep = original_info.python_eol - datetime.timedelta(days=120)
- overridden_info = original_info._replace(gapic_deprecation=custom_gapic_dep)
-
- with patch(
- "google.api_core._python_version_support.sys.version_info", mock_py_version
- ):
- with patch.dict(
- "google.api_core._python_version_support.PYTHON_VERSION_INFO",
- {version_tuple: overridden_info},
- ):
- result_before_boundary = check_python_version(
- today=custom_gapic_dep - datetime.timedelta(days=1)
- )
- assert (
- result_before_boundary == PythonVersionStatus.PYTHON_VERSION_SUPPORTED
- )
-
- with pytest.warns(FutureWarning, match="Google will stop supporting"):
- result_at_boundary = check_python_version(today=custom_gapic_dep)
- assert result_at_boundary == PythonVersionStatus.PYTHON_VERSION_DEPRECATED
-
-
-def test_untracked_older_version_is_unsupported():
- """Test that an old, untracked version is unsupported and logs."""
- mock_py_version = VersionInfoMock(major=3, minor=6)
-
- with patch(
- "google.api_core._python_version_support.sys.version_info", mock_py_version
- ):
- with pytest.warns(FutureWarning) as record:
- mock_date = datetime.date(2025, 1, 15)
- result = check_python_version(today=mock_date)
-
- assert result == PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED
- assert len(record) == 1
- assert "non-supported" in str(record[0].message)
-
-
-def test_untracked_newer_version_is_supported():
- """Test that a new, untracked version is supported and does not log."""
- mock_py_version = VersionInfoMock(major=40, minor=0)
-
- with patch(
- "google.api_core._python_version_support.sys.version_info", mock_py_version
- ):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- mock_date = datetime.date(2025, 1, 15)
- result = check_python_version(today=mock_date)
-
- assert result == PythonVersionStatus.PYTHON_VERSION_SUPPORTED
- assert len(w) == 0
diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py
deleted file mode 100644
index ff1a43f..0000000
--- a/tests/unit/test_rest_helpers.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from google.api_core import rest_helpers
-
-
-def test_flatten_simple_value():
- with pytest.raises(TypeError):
- rest_helpers.flatten_query_params("abc")
-
-
-def test_flatten_list():
- with pytest.raises(TypeError):
- rest_helpers.flatten_query_params(["abc", "def"])
-
-
-def test_flatten_none():
- assert rest_helpers.flatten_query_params(None) == []
-
-
-def test_flatten_empty_dict():
- assert rest_helpers.flatten_query_params({}) == []
-
-
-def test_flatten_simple_dict():
- obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
- assert rest_helpers.flatten_query_params(obj) == [
- ("a", "abc"),
- ("b", "def"),
- ("c", True),
- ("d", False),
- ("e", 10),
- ("f", -3.76),
- ]
-
-
-def test_flatten_simple_dict_strict():
- obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
- assert rest_helpers.flatten_query_params(obj, strict=True) == [
- ("a", "abc"),
- ("b", "def"),
- ("c", "true"),
- ("d", "false"),
- ("e", "10"),
- ("f", "-3.76"),
- ]
-
-
-def test_flatten_repeated_field():
- assert rest_helpers.flatten_query_params({"a": ["x", "y", "z", None]}) == [
- ("a", "x"),
- ("a", "y"),
- ("a", "z"),
- ]
-
-
-def test_flatten_nested_dict():
- obj = {"a": {"b": {"c": ["x", "y", "z"]}}, "d": {"e": "uvw"}}
- expected_result = [("a.b.c", "x"), ("a.b.c", "y"), ("a.b.c", "z"), ("d.e", "uvw")]
-
- assert rest_helpers.flatten_query_params(obj) == expected_result
-
-
-def test_flatten_repeated_dict():
- obj = {
- "a": {"b": {"c": [{"v": 1}, {"v": 2}]}},
- "d": "uvw",
- }
-
- with pytest.raises(ValueError):
- rest_helpers.flatten_query_params(obj)
-
-
-def test_flatten_repeated_list():
- obj = {
- "a": {"b": {"c": [["e", "f"], ["g", "h"]]}},
- "d": "uvw",
- }
-
- with pytest.raises(ValueError):
- rest_helpers.flatten_query_params(obj)
diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py
deleted file mode 100644
index 0f998df..0000000
--- a/tests/unit/test_rest_streaming.py
+++ /dev/null
@@ -1,296 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import logging
-import random
-import time
-from typing import List
-from unittest.mock import patch
-
-import proto
-import pytest
-import requests
-
-from google.api_core import rest_streaming
-from google.api import http_pb2
-from google.api import httpbody_pb2
-
-from ..helpers import Composer, Song, EchoResponse, parse_responses
-
-
-__protobuf__ = proto.module(package=__name__)
-SEED = int(time.time())
-logging.info(f"Starting sync rest streaming tests with random seed: {SEED}")
-random.seed(SEED)
-
-
-class ResponseMock(requests.Response):
- class _ResponseItr:
- def __init__(self, _response_bytes: bytes, random_split=False):
- self._responses_bytes = _response_bytes
- self._i = 0
- self._random_split = random_split
-
- def __next__(self):
- if self._i == len(self._responses_bytes):
- raise StopIteration
- if self._random_split:
- n = random.randint(1, len(self._responses_bytes[self._i :]))
- else:
- n = 1
- x = self._responses_bytes[self._i : self._i + n]
- self._i += n
- return x.decode("utf-8")
-
- def __init__(
- self,
- responses: List[proto.Message],
- response_cls,
- random_split=False,
- ):
- super().__init__()
- self._responses = responses
- self._random_split = random_split
- self._response_message_cls = response_cls
-
- def _parse_responses(self):
- return parse_responses(self._response_message_cls, self._responses)
-
- def close(self):
- raise NotImplementedError()
-
- def iter_content(self, *args, **kwargs):
- return self._ResponseItr(
- self._parse_responses(),
- random_split=self._random_split,
- )
-
-
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [(False, True), (False, False)],
-)
-def test_next_simple(random_split, resp_message_is_proto_plus):
- if resp_message_is_proto_plus:
- response_type = EchoResponse
- responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
- else:
- response_type = httpbody_pb2.HttpBody
- responses = [
- httpbody_pb2.HttpBody(content_type="hello world"),
- httpbody_pb2.HttpBody(content_type="yes"),
- ]
-
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming.ResponseIterator(resp, response_type)
- assert list(itr) == responses
-
-
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-def test_next_nested(random_split, resp_message_is_proto_plus):
- if resp_message_is_proto_plus:
- response_type = Song
- responses = [
- Song(title="some song", composer=Composer(given_name="some name")),
- Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
- ]
- else:
- # Although `http_pb2.HttpRule`` is used in the response, any response message
- # can be used which meets this criteria for the test of having a nested field.
- response_type = http_pb2.HttpRule
- responses = [
- http_pb2.HttpRule(
- selector="some selector",
- custom=http_pb2.CustomHttpPattern(kind="some kind"),
- ),
- http_pb2.HttpRule(
- selector="another selector",
- custom=http_pb2.CustomHttpPattern(path="some path"),
- ),
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming.ResponseIterator(resp, response_type)
- assert list(itr) == responses
-
-
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-def test_next_stress(random_split, resp_message_is_proto_plus):
- n = 50
- if resp_message_is_proto_plus:
- response_type = Song
- responses = [
- Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
- for i in range(n)
- ]
- else:
- response_type = http_pb2.HttpRule
- responses = [
- http_pb2.HttpRule(
- selector="selector_%d" % i,
- custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
- )
- for i in range(n)
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming.ResponseIterator(resp, response_type)
- assert list(itr) == responses
-
-
-@pytest.mark.parametrize(
- "random_split,resp_message_is_proto_plus",
- [
- (True, True),
- (False, True),
- (True, False),
- (False, False),
- ],
-)
-def test_next_escaped_characters_in_string(random_split, resp_message_is_proto_plus):
- if resp_message_is_proto_plus:
- response_type = Song
- composer_with_relateds = Composer()
- relateds = ["Artist A", "Artist B"]
- composer_with_relateds.relateds = relateds
-
- responses = [
- Song(
- title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
- ),
- Song(
- title='{"this is weird": "totally"}',
- composer=Composer(given_name="\\{}\\"),
- ),
- Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
- ]
- else:
- response_type = http_pb2.Http
- responses = [
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='ti"tle\nfoo\tbar{}',
- custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
- )
- ]
- ),
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='{"this is weird": "totally"}',
- custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
- )
- ]
- ),
- http_pb2.Http(
- rules=[
- http_pb2.HttpRule(
- selector='\\{"key": ["value",]}\\',
- custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
- )
- ]
- ),
- ]
- resp = ResponseMock(
- responses=responses, random_split=random_split, response_cls=response_type
- )
- itr = rest_streaming.ResponseIterator(resp, response_type)
- assert list(itr) == responses
-
-
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-def test_next_not_array(response_type):
- with patch.object(
- ResponseMock, "iter_content", return_value=iter('{"hello": 0}')
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming.ResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- next(itr)
- mock_method.assert_called_once()
-
-
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-def test_cancel(response_type):
- with patch.object(ResponseMock, "close", return_value=None) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming.ResponseIterator(resp, response_type)
- itr.cancel()
- mock_method.assert_called_once()
-
-
-@pytest.mark.parametrize(
- "response_type,return_value",
- [
- (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
- (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
- ],
-)
-def test_check_buffer(response_type, return_value):
- with patch.object(
- ResponseMock,
- "_parse_responses",
- return_value=return_value,
- ):
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming.ResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- next(itr)
- next(itr)
-
-
-@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
-def test_next_html(response_type):
- with patch.object(
- ResponseMock, "iter_content", return_value=iter("<!DOCTYPE html><html></html>")
- ) as mock_method:
- resp = ResponseMock(responses=[], response_cls=response_type)
- itr = rest_streaming.ResponseIterator(resp, response_type)
- with pytest.raises(ValueError):
- next(itr)
- mock_method.assert_called_once()
-
-
-def test_invalid_response_class():
- class SomeClass:
- pass
-
- resp = ResponseMock(responses=[], response_cls=SomeClass)
- with pytest.raises(
- ValueError,
- match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
- ):
- rest_streaming.ResponseIterator(resp, SomeClass)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
deleted file mode 100644
index da7ea18..0000000
--- a/tests/unit/test_timeout.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import itertools
-import pytest
-from unittest import mock
-
-from google.api_core import timeout as timeouts
-
-
-def test__exponential_timeout_generator_base_2():
- gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
-
- result = list(itertools.islice(gen, 8))
- assert result == [1, 2, 4, 8, 16, 32, 60, 60]
-
-
-@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
-def test__exponential_timeout_generator_base_deadline(utcnow):
- # Make each successive call to utcnow() advance one second.
- utcnow.side_effect = [
- datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15)
- ]
-
- gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
-
- result = list(itertools.islice(gen, 14))
- # Should grow until the cumulative time is > 30s, then start decreasing as
- # the cumulative time approaches 60s.
- assert result == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16]
-
-
-class TestTimeToDeadlineTimeout(object):
- def test_constructor(self):
- timeout_ = timeouts.TimeToDeadlineTimeout()
- assert timeout_._timeout is None
-
- def test_constructor_args(self):
- timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
- assert timeout_._timeout == 42.0
-
- def test___str__(self):
- timeout_ = timeouts.TimeToDeadlineTimeout(1)
- assert str(timeout_) == "<TimeToDeadlineTimeout timeout=1.0>"
-
- def test_apply(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
-
- datetime.datetime.now(tz=datetime.timezone.utc)
- datetime.timedelta(seconds=1)
-
- now = datetime.datetime.now(tz=datetime.timezone.utc)
-
- times = [
- now,
- now + datetime.timedelta(seconds=0.0009),
- now + datetime.timedelta(seconds=1),
- now + datetime.timedelta(seconds=39),
- now + datetime.timedelta(seconds=42),
- now + datetime.timedelta(seconds=43),
- ]
-
- def _clock():
- return times.pop(0)
-
- timeout_ = timeouts.TimeToDeadlineTimeout(42.0, _clock)
- wrapped = timeout_(target)
-
- wrapped()
- target.assert_called_with(timeout=42.0)
- wrapped()
- target.assert_called_with(timeout=41.0)
- wrapped()
- target.assert_called_with(timeout=3.0)
- wrapped()
- target.assert_called_with(timeout=42.0)
- wrapped()
- target.assert_called_with(timeout=42.0)
-
- def test_apply_no_timeout(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
-
- datetime.datetime.now(tz=datetime.timezone.utc)
- datetime.timedelta(seconds=1)
-
- now = datetime.datetime.now(tz=datetime.timezone.utc)
-
- times = [
- now,
- now + datetime.timedelta(seconds=0.0009),
- now + datetime.timedelta(seconds=1),
- now + datetime.timedelta(seconds=2),
- ]
-
- def _clock():
- return times.pop(0)
-
- timeout_ = timeouts.TimeToDeadlineTimeout(clock=_clock)
- wrapped = timeout_(target)
-
- wrapped()
- target.assert_called_with()
- wrapped()
- target.assert_called_with()
-
- def test_apply_passthrough(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
- wrapped = timeout_(target)
-
- wrapped(1, 2, meep="moop")
-
- actual_arg_0 = target.call_args[0][0]
- actual_arg_1 = target.call_args[0][1]
- actual_arg_meep = target.call_args[1]["meep"]
- actual_arg_timeuut = target.call_args[1]["timeout"]
-
- assert actual_arg_0 == 1
- assert actual_arg_1 == 2
- assert actual_arg_meep == "moop"
- assert actual_arg_timeuut == pytest.approx(42.0, abs=0.02)
-
-
-class TestConstantTimeout(object):
- def test_constructor(self):
- timeout_ = timeouts.ConstantTimeout()
- assert timeout_._timeout is None
-
- def test_constructor_args(self):
- timeout_ = timeouts.ConstantTimeout(42.0)
- assert timeout_._timeout == 42.0
-
- def test___str__(self):
- timeout_ = timeouts.ConstantTimeout(1)
- assert str(timeout_) == "<ConstantTimeout timeout=1.0>"
-
- def test_apply(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeouts.ConstantTimeout(42.0)
- wrapped = timeout_(target)
-
- wrapped()
-
- target.assert_called_once_with(timeout=42.0)
-
- def test_apply_passthrough(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeouts.ConstantTimeout(42.0)
- wrapped = timeout_(target)
-
- wrapped(1, 2, meep="moop")
-
- target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
-
-
-class TestExponentialTimeout(object):
- def test_constructor(self):
- timeout_ = timeouts.ExponentialTimeout()
- assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
- assert timeout_._deadline == timeouts._DEFAULT_DEADLINE
-
- def test_constructor_args(self):
- timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
- assert timeout_._initial == 1
- assert timeout_._maximum == 2
- assert timeout_._multiplier == 3
- assert timeout_._deadline == 4
-
- def test_with_timeout(self):
- original_timeout = timeouts.ExponentialTimeout()
- timeout_ = original_timeout.with_deadline(42)
- assert original_timeout is not timeout_
- assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
- assert timeout_._deadline == 42
-
- def test___str__(self):
- timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
- assert str(timeout_) == (
- "<ExponentialTimeout initial=1.0, maximum=2.0, multiplier=3.0, "
- "deadline=4.0>"
- )
-
- def test_apply(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeouts.ExponentialTimeout(1, 10, 2)
- wrapped = timeout_(target)
-
- wrapped()
- target.assert_called_with(timeout=1)
-
- wrapped()
- target.assert_called_with(timeout=2)
-
- wrapped()
- target.assert_called_with(timeout=4)
-
- def test_apply_passthrough(self):
- target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeouts.ExponentialTimeout(42.0, 100, 2)
- wrapped = timeout_(target)
-
- wrapped(1, 2, meep="moop")
-
- target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
diff --git a/tests/unit/test_universe.py b/tests/unit/test_universe.py
deleted file mode 100644
index 214e00a..0000000
--- a/tests/unit/test_universe.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-from google.api_core import universe
-
-
-class _Fake_Credentials:
- def __init__(self, universe_domain=None):
- if universe_domain:
- self.universe_domain = universe_domain
-
-
-def test_determine_domain():
- domain_client = "foo.com"
- domain_env = "bar.com"
-
- assert universe.determine_domain(domain_client, domain_env) == domain_client
- assert universe.determine_domain(None, domain_env) == domain_env
- assert universe.determine_domain(domain_client, None) == domain_client
- assert universe.determine_domain(None, None) == universe.DEFAULT_UNIVERSE
-
- with pytest.raises(universe.EmptyUniverseError):
- universe.determine_domain("", None)
-
- with pytest.raises(universe.EmptyUniverseError):
- universe.determine_domain(None, "")
-
-
-def test_compare_domains():
- fake_domain = "foo.com"
- another_fake_domain = "bar.com"
-
- assert universe.compare_domains(universe.DEFAULT_UNIVERSE, _Fake_Credentials())
- assert universe.compare_domains(fake_domain, _Fake_Credentials(fake_domain))
-
- with pytest.raises(universe.UniverseMismatchError) as excinfo:
- universe.compare_domains(
- universe.DEFAULT_UNIVERSE, _Fake_Credentials(fake_domain)
- )
- assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
- assert str(excinfo.value).find(fake_domain) >= 0
-
- with pytest.raises(universe.UniverseMismatchError) as excinfo:
- universe.compare_domains(fake_domain, _Fake_Credentials())
- assert str(excinfo.value).find(fake_domain) >= 0
- assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
-
- with pytest.raises(universe.UniverseMismatchError) as excinfo:
- universe.compare_domains(fake_domain, _Fake_Credentials(another_fake_domain))
- assert str(excinfo.value).find(fake_domain) >= 0
- assert str(excinfo.value).find(another_fake_domain) >= 0
diff --git a/tests/unit/test_version_header.py b/tests/unit/test_version_header.py
deleted file mode 100644
index ea7028e..0000000
--- a/tests/unit/test_version_header.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from google.api_core import version_header
-
-
-@pytest.mark.parametrize("version_identifier", ["some_value", ""])
-def test_to_api_version_header(version_identifier):
- value = version_header.to_api_version_header(version_identifier)
- assert value == (version_header.API_VERSION_METADATA_KEY, version_identifier)