diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
index bf218bbc3f9..f298284000e 100644
--- a/.github/blunderbuss.yml
+++ b/.github/blunderbuss.yml
@@ -16,12 +16,6 @@
# Updates should be made to both assign_issues_by & assign_prs_by sections
###
assign_issues_by:
- # DEE teams
- - labels:
- - "api: people-and-planet-ai"
- to:
- - davidcavazos
-
# AppEco teams
- labels:
- "api: cloudsql"
@@ -57,21 +51,6 @@ assign_issues_by:
to:
- GoogleCloudPlatform/api-bigquery
- # AppEco individuals
- - labels:
- - "api: aml-ai"
- to:
- - nickcook
- - labels:
- - "api: bigquery"
- to:
- - shollyman
- - labels:
- - "api: datascienceonramp"
- to:
- - leahecole
- - bradmiro
-
# Self-service teams
- labels:
- "api: asset"
@@ -120,27 +99,10 @@ assign_issues_by:
to:
- GoogleCloudPlatform/googleapi-dataplex
- # Self-service individuals
- - labels:
- - "api: auth"
- to:
- - arithmetic1728
- - labels:
- - "api: appengine"
- to:
- - jinglundong
-
-
###
# Updates should be made to both assign_issues_by & assign_prs_by sections
###
assign_prs_by:
- # DEE teams
- - labels:
- - "api: people-and-planet-ai"
- to:
- - davidcavazos
-
# AppEco teams
- labels:
- "api: cloudsql"
@@ -170,17 +132,6 @@ assign_prs_by:
to:
- GoogleCloudPlatform/cloud-dpes-composer
- # AppEco individuals
- - labels:
- - "api: bigquery"
- to:
- - shollyman
- - labels:
- - "api: datascienceonramp"
- to:
- - leahecole
- - bradmiro
-
# Self-service teams
- labels:
- "api: asset"
@@ -235,16 +186,3 @@ assign_prs_by:
- "api: connectgateway"
to:
- GoogleCloudPlatform/connectgateway
- # Self-service individuals
- - labels:
- - "api: auth"
- to:
- - arithmetic1728
- - labels:
- - "api: appengine"
- to:
- - jinglundong
-
-###
-# Updates should be made to both assign_issues_by & assign_prs_by sections
-###
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
index 88aa1e8fae4..14e8ba1a64c 100644
--- a/.github/snippet-bot.yml
+++ b/.github/snippet-bot.yml
@@ -1,4 +1,5 @@
aggregateChecks: true
alwaysCreateStatusCheck: true
ignoreFiles:
- - README.md
+ - "README.md"
+ - "AUTHORING_GUIDE.md"
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index bf76c480c47..1403afe718c 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -43,7 +43,6 @@ branchProtectionRules:
# List of required status check contexts that must pass for commits to be accepted to matching branches.
requiredStatusCheckContexts:
- "Kokoro CI - Lint"
- - "Kokoro CI - Python 2.7 (App Engine Standard Only)"
- "Kokoro CI - Python 3.9"
- "Kokoro CI - Python 3.13"
- "cla/google"
diff --git a/.gitignore b/.gitignore
index bcb6b89f6ff..80cf8846a58 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,4 +30,8 @@ env/
.idea
.env*
**/venv
-**/noxfile.py
\ No newline at end of file
+**/noxfile.py
+
+# Auth Local secrets file
+auth/custom-credentials/okta/custom-credentials-okta-secrets.json
+auth/custom-credentials/aws/custom-credentials-aws-secrets.json
diff --git a/.kokoro/docker/Dockerfile b/.kokoro/docker/Dockerfile
index ba9af12a933..c37e7f091e2 100644
--- a/.kokoro/docker/Dockerfile
+++ b/.kokoro/docker/Dockerfile
@@ -110,33 +110,68 @@ RUN curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /var/cache/apt/archives/*.deb
-COPY fetch_gpg_keys.sh /tmp
-# Install the desired versions of Python.
-RUN set -ex \
- && export GNUPGHOME="$(mktemp -d)" \
- && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
- && /tmp/fetch_gpg_keys.sh \
- && for PYTHON_VERSION in 2.7.18 3.7.17 3.8.20 3.9.20 3.10.15 3.11.10 3.12.7 3.13.0; do \
- wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
- && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
- && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
- && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
- && mkdir -p /usr/src/python-${PYTHON_VERSION} \
- && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
- && rm python-${PYTHON_VERSION}.tar.xz \
- && cd /usr/src/python-${PYTHON_VERSION} \
- && ./configure \
- --enable-shared \
- # This works only on Python 2.7 and throws a warning on every other
- # version, but seems otherwise harmless.
- --enable-unicode=ucs4 \
- --with-system-ffi \
- --without-ensurepip \
- && make -j$(nproc) \
- && make install \
- && ldconfig \
+# From https://www.python.org/downloads/metadata/sigstore/
+# Starting with Python 3.14, Sigstore is the only method of signing and verification of release artifacts.
+RUN LATEST_VERSION="2.6.1" && \
+ wget "https://github.com/sigstore/cosign/releases/download/v${LATEST_VERSION}/cosign_${LATEST_VERSION}_amd64.deb" && \
+ dpkg -i cosign_${LATEST_VERSION}_amd64.deb && \
+ rm cosign_${LATEST_VERSION}_amd64.deb
+
+ARG PYTHON_VERSIONS="3.7.17 3.8.20 3.9.23 3.10.18 3.11.13 3.12.11 3.13.8 3.14.0"
+
+SHELL ["/bin/bash", "-c"]
+
+RUN set -eux; \
+ # Define the required associative arrays completely.
+ declare -A PYTHON_IDENTITIES; \
+ PYTHON_IDENTITIES=(\
+ [3.7]="nad@python.org" \
+ [3.8]="lukasz@langa.pl" \
+ [3.9]="lukasz@langa.pl" \
+ [3.10]="pablogsal@python.org" \
+ [3.11]="pablogsal@python.org" \
+ [3.12]="thomas@python.org" \
+ [3.13]="thomas@python.org" \
+ [3.14]="hugo@python.org" \
+ ); \
+ declare -A PYTHON_ISSUERS; \
+ PYTHON_ISSUERS=(\
+ [3.7]="https://github.com/login/oauth" \
+ [3.8]="https://github.com/login/oauth" \
+ [3.9]="https://github.com/login/oauth" \
+ [3.10]="https://accounts.google.com" \
+ [3.11]="https://accounts.google.com" \
+ [3.12]="https://accounts.google.com" \
+ [3.13]="https://accounts.google.com" \
+ [3.14]="https://github.com/login/oauth" \
+ ); \
+ \
+ for VERSION in $PYTHON_VERSIONS; do \
+ # 1. Define VERSION_GROUP (e.g., 3.14 from 3.14.0)
+ VERSION_GROUP="$(echo "${VERSION}" | cut -d . -f 1,2)"; \
+ \
+ # 2. Look up IDENTITY and ISSUER using the defined VERSION_GROUP
+ IDENTITY="${PYTHON_IDENTITIES[$VERSION_GROUP]}"; \
+ ISSUER="${PYTHON_ISSUERS[$VERSION_GROUP]}"; \
+ \
+ wget --quiet -O python-${VERSION}.tar.xz "https://www.python.org/ftp/python/${VERSION}/Python-$VERSION.tar.xz" \
+ && wget --quiet -O python-${VERSION}.tar.xz.sigstore "https://www.python.org/ftp/python/${VERSION}/Python-$VERSION.tar.xz.sigstore" \
+ # Verify the Python tarball signature with cosign.
+ && cosign verify-blob python-${VERSION}.tar.xz \
+ --certificate-oidc-issuer "${ISSUER}" \
+ --certificate-identity "${IDENTITY}" \
+ --bundle python-${VERSION}.tar.xz.sigstore \
+ && mkdir -p /usr/src/python-${VERSION} \
+ && tar -xJC /usr/src/python-${VERSION} --strip-components=1 -f python-${VERSION}.tar.xz \
+  && rm python-${VERSION}.tar.xz python-${VERSION}.tar.xz.sigstore \
+ && cd /usr/src/python-${VERSION} \
+ && ./configure \
+ --enable-shared \
+ --with-system-ffi \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
; done \
- && rm -rf "${GNUPGHOME}" \
&& rm -rf /usr/src/python* \
&& rm -rf ~/.cache/
@@ -158,6 +193,7 @@ RUN wget --no-check-certificate -O /tmp/get-pip-3-7.py 'https://bootstrap.pypa.i
&& [ "$(pip list |tac|tac| awk -F '[ ()]+' '$1 == "pip" { print $2; exit }')" = "$PYTHON_PIP_VERSION" ]
# Ensure Pip for all python3 versions
+RUN python3.14 /tmp/get-pip.py
RUN python3.13 /tmp/get-pip.py
RUN python3.12 /tmp/get-pip.py
RUN python3.11 /tmp/get-pip.py
@@ -175,6 +211,7 @@ RUN python3.10 -m pip
RUN python3.11 -m pip
RUN python3.12 -m pip
RUN python3.13 -m pip
+RUN python3.14 -m pip
# Install "setuptools" for Python 3.12+ (see https://docs.python.org/3/whatsnew/3.12.html#distutils)
RUN python3.12 -m pip install --no-cache-dir setuptools
diff --git a/.kokoro/docker/fetch_gpg_keys.sh b/.kokoro/docker/fetch_gpg_keys.sh
deleted file mode 100755
index 5b8dbbab1ed..00000000000
--- a/.kokoro/docker/fetch_gpg_keys.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# A script to fetch gpg keys with retry.
-
-function retry {
- if [[ "${#}" -le 1 ]]; then
- echo "Usage: ${0} retry_count commands.."
- exit 1
- fi
- local retries=${1}
- local command="${@:2}"
- until [[ "${retries}" -le 0 ]]; do
- $command && return 0
- if [[ $? -ne 0 ]]; then
- echo "command failed, retrying"
- ((retries--))
- fi
- done
- return 1
-}
-
-# 2.7.17 (Benjamin Peterson)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- C01E1CAD5EA2C4F0B8E3571504C367C218ADD4FF
-
-# 3.4.10, 3.5.9 (Larry Hastings)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- 97FC712E4C024BBEA48A61ED3A5CA953F73C700D
-
-# 3.6.9, 3.7.5 (Ned Deily)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
-
-# 3.8.0, 3.9.0 (Łukasz Langa)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- E3FF2839C048B25C084DEBE9B26995E310250568
-
-# 3.10.x and 3.11.x (Pablo Galindo Salgado)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- A035C8C19219BA821ECEA86B64E628F8D684696D
-
-# 3.12.x and 3.13.x source files and tags (Thomas Wouters)
-retry 3 gpg --keyserver keyserver.ubuntu.com --recv-keys \
- A821E680E5FA6305
\ No newline at end of file
diff --git a/.kokoro/python3.10/periodic.cfg b/.kokoro/python3.10/periodic.cfg
index 095f5fde9ae..2aad97c46ad 100644
--- a/.kokoro/python3.10/periodic.cfg
+++ b/.kokoro/python3.10/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/python3.11/periodic.cfg b/.kokoro/python3.11/periodic.cfg
index 2c6918c02a8..22df60eae56 100644
--- a/.kokoro/python3.11/periodic.cfg
+++ b/.kokoro/python3.11/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/python3.12/periodic.cfg b/.kokoro/python3.12/periodic.cfg
index 2c6918c02a8..22df60eae56 100644
--- a/.kokoro/python3.12/periodic.cfg
+++ b/.kokoro/python3.12/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/python3.13/periodic.cfg b/.kokoro/python3.13/periodic.cfg
index fd4d6e8dcd5..3ba78a1ab92 100644
--- a/.kokoro/python3.13/periodic.cfg
+++ b/.kokoro/python3.13/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/python2.7/common.cfg b/.kokoro/python3.14/common.cfg
similarity index 89%
rename from .kokoro/python2.7/common.cfg
rename to .kokoro/python3.14/common.cfg
index ad2c8f64523..8d12e9ed952 100644
--- a/.kokoro/python2.7/common.cfg
+++ b/.kokoro/python3.14/common.cfg
@@ -1,4 +1,4 @@
-# Copyright 2019 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -44,11 +44,16 @@ action {
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-2.7"
+ value: "py-3.14"
}
-# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests"
+ value: "python-docs-samples-tests-314"
+}
+
+# Number of test workers.
+env_vars: {
+ key: "NUM_TEST_WORKERS"
+ value: "10"
}
diff --git a/.kokoro/python2.7/continuous.cfg b/.kokoro/python3.14/continuous.cfg
similarity index 96%
rename from .kokoro/python2.7/continuous.cfg
rename to .kokoro/python3.14/continuous.cfg
index cfbe29058c8..5753c38482a 100644
--- a/.kokoro/python2.7/continuous.cfg
+++ b/.kokoro/python3.14/continuous.cfg
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/python2.7/periodic.cfg b/.kokoro/python3.14/periodic.cfg
similarity index 83%
rename from .kokoro/python2.7/periodic.cfg
rename to .kokoro/python3.14/periodic.cfg
index 2f3556908d3..8a14abb05ef 100644
--- a/.kokoro/python2.7/periodic.cfg
+++ b/.kokoro/python3.14/periodic.cfg
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,7 +20,8 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
+# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
}
diff --git a/.kokoro/python2.7/presubmit.cfg b/.kokoro/python3.14/presubmit.cfg
similarity index 96%
rename from .kokoro/python2.7/presubmit.cfg
rename to .kokoro/python3.14/presubmit.cfg
index d74d307bbed..b8ecd3b0d15 100644
--- a/.kokoro/python2.7/presubmit.cfg
+++ b/.kokoro/python3.14/presubmit.cfg
@@ -1,4 +1,4 @@
-# Copyright 2019 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/python3.8/periodic.cfg b/.kokoro/python3.8/periodic.cfg
index 5aff64926c5..3c5ea1d2f14 100644
--- a/.kokoro/python3.8/periodic.cfg
+++ b/.kokoro/python3.8/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/python3.9/periodic.cfg b/.kokoro/python3.9/periodic.cfg
index 5aff64926c5..3c5ea1d2f14 100644
--- a/.kokoro/python3.9/periodic.cfg
+++ b/.kokoro/python3.9/periodic.cfg
@@ -20,11 +20,6 @@ env_vars: {
value: ".kokoro/tests/run_tests.sh"
}
-env_vars: {
- key: "REPORT_TO_BUILD_COP_BOT"
- value: "false"
-}
-
# Tell Trampoline to upload the Docker image after successfull build.
env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
diff --git a/.kokoro/tests/run_single_test.sh b/.kokoro/tests/run_single_test.sh
index e7730f6f550..2119805bdc5 100755
--- a/.kokoro/tests/run_single_test.sh
+++ b/.kokoro/tests/run_single_test.sh
@@ -90,15 +90,6 @@ if [[ "${INJECT_REGION_TAGS:-}" == "true" ]]; then
fi
set -e
-# If REPORT_TO_BUILD_COP_BOT is set to "true", send the test log
-# to the FlakyBot.
-# See:
-# https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
-if [[ "${REPORT_TO_BUILD_COP_BOT:-}" == "true" ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
-fi
-
if [[ "${EXIT}" -ne 0 ]]; then
echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
else
diff --git a/.kokoro/tests/run_tests.sh b/.kokoro/tests/run_tests.sh
index 1715decdce7..191b40b09e0 100755
--- a/.kokoro/tests/run_tests.sh
+++ b/.kokoro/tests/run_tests.sh
@@ -58,7 +58,7 @@ if [[ $* == *--only-diff-head* ]]; then
fi
fi
-# Because Kokoro runs presubmit builds simalteneously, we often see
+# Because Kokoro runs presubmit builds simultaneously, we often see
# quota related errors. I think we can avoid this by changing the
# order of tests to execute (e.g. reverse order for py-3.8
# build). Currently there's no easy way to do that with btlr, so we
diff --git a/.kokoro/tests/run_tests_orig.sh b/.kokoro/tests/run_tests_orig.sh
index b641d00495f..dc954fd13bd 100755
--- a/.kokoro/tests/run_tests_orig.sh
+++ b/.kokoro/tests/run_tests_orig.sh
@@ -176,15 +176,6 @@ for file in **/requirements.txt; do
nox -s "$RUN_TESTS_SESSION"
EXIT=$?
- # If REPORT_TO_BUILD_COP_BOT is set to "true", send the test log
- # to the FlakyBot.
- # See:
- # https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
- if [[ "${REPORT_TO_BUILD_COP_BOT:-}" == "true" ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
- fi
-
if [[ $EXIT -ne 0 ]]; then
RTN=1
echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index b0334486492..d9031cfd6fa 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -159,9 +159,6 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
"KOKORO_GITHUB_COMMIT"
"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For FlakyBot
- "KOKORO_GITHUB_COMMIT_URL"
- "KOKORO_GITHUB_PULL_REQUEST_URL"
)
elif [[ "${TRAVIS:-}" == "true" ]]; then
RUNNING_IN_CI="true"
diff --git a/.trampolinerc b/.trampolinerc
index e9ed9bbb060..ea532d7ea51 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,7 +24,6 @@ required_envvars+=(
pass_down_envvars+=(
"BUILD_SPECIFIC_GCLOUD_PROJECT"
- "REPORT_TO_BUILD_COP_BOT"
"INJECT_REGION_TAGS"
# Target directories.
"RUN_TESTS_DIRS"
diff --git a/AUTHORING_GUIDE.md b/AUTHORING_GUIDE.md
index 42b9545ceac..6ae8d0a0372 100644
--- a/AUTHORING_GUIDE.md
+++ b/AUTHORING_GUIDE.md
@@ -68,7 +68,7 @@ We recommend using the Python version management tool
[Pyenv](https://github.com/pyenv/pyenv) if you are using MacOS or Linux.
**Googlers:** See [the internal Python policies
-doc](https://g3doc.corp.google.com/company/teams/cloud-devrel/dpe/samples/python.md?cl=head).
+doc](go/cloudsamples/language-guides/python).
**Using MacOS?:** See [Setting up a Mac development environment with pyenv and
pyenv-virtualenv](MAC_SETUP.md).
@@ -82,10 +82,6 @@ Guidelines](#testing-guidelines) are covered separately below.
### Folder Location
-Samples that primarily show the use of one client library should be placed in
-the client library repository `googleapis/python-{api}`. Other samples should be
-placed in this repository `python-docs-samples`.
-
**Library repositories:** Each sample should be in a folder under the top-level
samples folder `samples` in the client library repository. See the
[Text-to-Speech
@@ -108,12 +104,6 @@ folder, and App Engine Flex samples are under the
[appengine/flexible](https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/appengine/flexible)
folder.
-If your sample is a set of discrete code snippets that each demonstrate a single
-operation, these should be grouped into a `snippets` folder. For example, see
-the snippets in the
-[bigtable/snippets/writes](https://github.com/googleapis/python-bigtable/tree/main/samples/snippets/writes)
-folder.
-
If your sample is a quickstart — intended to demonstrate how to quickly get
started with using a service or API — it should be in a _quickstart_ folder.
@@ -274,11 +264,12 @@ task_from_dict = {
### Functions and Classes
-Very few samples will require authoring classes. Prefer functions whenever
-possible. See [this video](https://www.youtube.com/watch?v=o9pEzgHorH0) for some
-insight into why classes aren't as necessary as you might think in Python.
-Classes also introduce cognitive load. If you do write a class in a sample, be
-prepared to justify its existence during code review.
+Prefer functions over classes whenever possible.
+
+See [this video](https://www.youtube.com/watch?v=o9pEzgHorH0) for some
+hints into practical refactoring examples where simpler functions lead to more
+readable and maintainable code.
+
#### Descriptive function names
@@ -456,17 +447,33 @@ git+https://github.com/googleapis/python-firestore.git@ee518b741eb5d7167393c23ba
### Region Tags
-Sample code may be integrated into Google Cloud Documentation through the use of
-region tags, which are comments added to the source code to identify code blocks
-that correspond to specific topics covered in the documentation. For example,
-see [this
-sample](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/cloud-sql/mysql/sqlalchemy/main.py)
-— the region tags are the comments that begin with `[START` or `[END`.
-
-The use of region tags is beyond the scope of this document, but if you’re using
-region tags they should start after the source code header (license/copyright
-information), but before imports and global configuration such as initializing
-constants.
+Region tags are comments added to the source code that begin with
+`[START region_tag]` and end with `[END region_tag]`. They enclose
+the core sample logic that can be easily copied into a REPL and run.
+
+This allows us to integrate this copy-paste callable code into
+documentation directly. Region tags should be placed after the
+license header but before imports that are crucial to the
+sample running.
+
+Example:
+```python
+# This import is not included within the region tag as
+# it is used to make the sample command-line runnable
+import sys
+
+# [START example_storage_control_create_folder]
+# This import is included within the region tag
+# as it is critical to understanding the sample
+from google.cloud import storage_control_v2
+
+
+def create_folder(bucket_name: str, folder_name: str) -> None:
+    ...  # Create the folder via storage_control_v2, then print the created folder's name.
+
+
+# [END example_storage_control_create_folder]
+```
### Exception Handling
diff --git a/README.md b/README.md
index e699be6032e..398102e8902 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,6 @@
Python samples for [Google Cloud Platform products][cloud].
-[![Build Status][py-2.7-shield]][py-2.7-link] [![Build Status][py-3.9-shield]][py-3.9-link] [![Build Status][py-3.10-shield]][py-3.10-link] [![Build Status][py-3.11-shield]][py-3.11-link] [![Build Status][py-3.12-shield]][py-3.12-link] [![Build Status][py-3.13-shield]][py-3.13-link]
-
## Google Cloud Samples
Check out some of the samples found on this repository on the [Google Cloud Samples](https://cloud.google.com/docs/samples?l=python) page.
@@ -66,16 +64,3 @@ Contributions welcome! See the [Contributing Guide](CONTRIBUTING.md).
[cloud_python_setup]: https://cloud.google.com/python/setup
[auth_command]: https://cloud.google.com/sdk/gcloud/reference/beta/auth/application-default/login
[gcp_auth]: https://cloud.google.com/docs/authentication#projects_and_resources
-
-[py-2.7-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-2.7.svg
-[py-2.7-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-2.7.html
-[py-3.9-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.9.svg
-[py-3.9-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.9.html
-[py-3.10-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-310.svg
-[py-3.10-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.10.html
-[py-3.11-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-311.svg
-[py-3.11-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.11.html
-[py-3.12-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.12.svg
-[py-3.12-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.12.html
-[py-3.13-shield]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.13.svg
-[py-3.13-link]: https://storage.googleapis.com/cloud-devrel-public/python-docs-samples/badges/py-3.13.html
diff --git a/alloydb/notebooks/embeddings_batch_processing.ipynb b/alloydb/notebooks/embeddings_batch_processing.ipynb
index 794b8032e8b..862656f1c7a 100644
--- a/alloydb/notebooks/embeddings_batch_processing.ipynb
+++ b/alloydb/notebooks/embeddings_batch_processing.ipynb
@@ -31,7 +31,7 @@
"source": [
"# Generate and store embeddings with batch processing\n",
"\n",
- "[](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/alloydb/notebooks/generate_batch_embeddings.ipynb)\n",
+ "[](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/alloydb/notebooks/embeddings_batch_processing.ipynb)\n",
"\n",
"---\n",
"## Introduction\n",
@@ -358,7 +358,7 @@
"source": [
"### Create a Database\n",
"\n",
- "Nex, you will create database to store the data using the connection pool. Enabling public IP takes a few minutes, you may get an error that there is no public IP address. Please wait and retry this step if you hit an error!"
+ "Next, you will create a database to store the data using the connection pool. Enabling public IP takes a few minutes, you may get an error that there is no public IP address. Please wait and retry this step if you hit an error!"
]
},
{
diff --git a/appengine/flexible/README.md b/appengine/flexible/README.md
index 0cc851a437e..8f6a03a894f 100644
--- a/appengine/flexible/README.md
+++ b/appengine/flexible/README.md
@@ -7,8 +7,6 @@
These are samples for using Python on Google App Engine Flexible Environment. These samples are typically referenced from the [docs](https://cloud.google.com/appengine/docs).
-For code samples of Python version 3.7 and earlier, please check
-https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/appengine/flexible_python37_and_earlier
See our other [Google Cloud Platform github repos](https://github.com/GoogleCloudPlatform) for sample applications and
scaffolding for other frameworks and use cases.
diff --git a/appengine/flexible/django_cloudsql/noxfile_config.py b/appengine/flexible/django_cloudsql/noxfile_config.py
index 30010ba672d..60e19bd8a96 100644
--- a/appengine/flexible/django_cloudsql/noxfile_config.py
+++ b/appengine/flexible/django_cloudsql/noxfile_config.py
@@ -22,7 +22,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7", "3.7", "3.8", "3.10", "3.12", "3.13"],
+ "ignored_versions": ["2.7", "3.7", "3.8", "3.9", "3.10", "3.11"],
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
diff --git a/appengine/flexible/django_cloudsql/requirements.txt b/appengine/flexible/django_cloudsql/requirements.txt
index 1cca009774e..da90b09edaa 100644
--- a/appengine/flexible/django_cloudsql/requirements.txt
+++ b/appengine/flexible/django_cloudsql/requirements.txt
@@ -1,6 +1,6 @@
-Django==5.2.3
+Django==6.0.1; python_version >= "3.12"
gunicorn==23.0.0
-psycopg2-binary==2.9.10
+psycopg2-binary==2.9.11
django-environ==0.12.0
google-cloud-secret-manager==2.21.1
-django-storages[google]==1.14.5
+django-storages[google]==1.14.6
diff --git a/appengine/flexible/hello_world/app.yaml b/appengine/flexible/hello_world/app.yaml
index ac38af83425..8a9b1e1763b 100644
--- a/appengine/flexible/hello_world/app.yaml
+++ b/appengine/flexible/hello_world/app.yaml
@@ -17,7 +17,8 @@ env: flex
entrypoint: gunicorn -b :$PORT main:app
runtime_config:
- operating_system: ubuntu22
+ operating_system: ubuntu24
+  runtime_version: "3.12"
# This sample incurs costs to run on the App Engine flexible environment.
# The settings below are to reduce costs during testing and are not appropriate
diff --git a/appengine/flexible/hello_world/requirements.txt b/appengine/flexible/hello_world/requirements.txt
index 068ea0acdfc..bdb61ec2417 100644
--- a/appengine/flexible/hello_world/requirements.txt
+++ b/appengine/flexible/hello_world/requirements.txt
@@ -1,5 +1,2 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-Werkzeug==3.0.3; python_version > '3.6'
-Werkzeug==2.3.8; python_version < '3.7'
-gunicorn==23.0.0
\ No newline at end of file
+Flask==3.0.3
+gunicorn==23.0.0
diff --git a/appengine/flexible/hello_world_django/app.yaml b/appengine/flexible/hello_world_django/app.yaml
index 62b74a9c27e..85096c4adc4 100644
--- a/appengine/flexible/hello_world_django/app.yaml
+++ b/appengine/flexible/hello_world_django/app.yaml
@@ -17,4 +17,4 @@ env: flex
entrypoint: gunicorn -b :$PORT project_name.wsgi
runtime_config:
- python_version: 3
+ operating_system: "ubuntu24"
diff --git a/appengine/flexible/hello_world_django/noxfile_config.py b/appengine/flexible/hello_world_django/noxfile_config.py
index 196376e7023..692b834f789 100644
--- a/appengine/flexible/hello_world_django/noxfile_config.py
+++ b/appengine/flexible/hello_world_django/noxfile_config.py
@@ -22,7 +22,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7", "3.7"],
+ "ignored_versions": ["2.7", "3.7", "3.8", "3.9", "3.10", "3.11"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
"enforce_type_hints": False,
diff --git a/appengine/flexible/hello_world_django/project_name/settings.py b/appengine/flexible/hello_world_django/project_name/settings.py
index f8b93099d56..bd094b5f576 100644
--- a/appengine/flexible/hello_world_django/project_name/settings.py
+++ b/appengine/flexible/hello_world_django/project_name/settings.py
@@ -114,3 +114,5 @@
# https://docs.djangoproject.com/en/stable/howto/static-files/
STATIC_URL = "/static/"
+
+STATIC_ROOT = os.path.join(BASE_DIR, "static")
diff --git a/appengine/flexible/hello_world_django/project_name/urls.py b/appengine/flexible/hello_world_django/project_name/urls.py
index 9a393bb42d2..7d3a1e0f315 100644
--- a/appengine/flexible/hello_world_django/project_name/urls.py
+++ b/appengine/flexible/hello_world_django/project_name/urls.py
@@ -13,12 +13,12 @@
# limitations under the License.
from django.contrib import admin
-from django.urls import include, path
+from django.urls import path
import helloworld.views
urlpatterns = [
- path("admin/", include(admin.site.urls)),
+ path("admin/", admin.site.urls),
path("", helloworld.views.index),
]
diff --git a/appengine/flexible/hello_world_django/requirements.txt b/appengine/flexible/hello_world_django/requirements.txt
index b1ec55c859c..a7f029a554d 100644
--- a/appengine/flexible/hello_world_django/requirements.txt
+++ b/appengine/flexible/hello_world_django/requirements.txt
@@ -1,2 +1,2 @@
-Django==5.2.3
+Django==6.0.1; python_version >= "3.12"
gunicorn==23.0.0
diff --git a/appengine/flexible_python37_and_earlier/README.md b/appengine/flexible_python37_and_earlier/README.md
deleted file mode 100644
index 41927a35c3d..00000000000
--- a/appengine/flexible_python37_and_earlier/README.md
+++ /dev/null
@@ -1,70 +0,0 @@
-## Google App Engine Flexible Environment Python Samples
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/README.md
-
-These are samples for using Python on Google App Engine Flexible Environment. These samples are typically referenced from the [docs](https://cloud.google.com/appengine/docs).
-
-See our other [Google Cloud Platform github repos](https://github.com/GoogleCloudPlatform) for sample applications and
-scaffolding for other frameworks and use cases.
-
-## Run Locally
-
-Some samples have specific instructions. If there is a README in the sample folder, please refer to it for any additional steps required to run the sample.
-
-In general, the samples typically require:
-
-1. Install the [Google Cloud SDK](https://cloud.google.com/sdk/), including the [gcloud tool](https://cloud.google.com/sdk/gcloud/), and [gcloud app component](https://cloud.google.com/sdk/gcloud-app).
-
-2. Setup the gcloud tool. This provides authentication to Google Cloud APIs and services.
-
- ```
- gcloud init
- ```
-
-3. Clone this repo.
-
- ```
- git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
- cd python-docs-samples/appengine/flexible_python37_and_earlier
- ```
-
-4. Follow https://cloud.google.com/python/docs/setup to set up a Python development environment. Then run:
-
- ```
- pip install -r requirements.txt
- python main.py
- ```
-
-5. Visit the application at [http://localhost:8080](http://localhost:8080).
-
-
-## Deploying
-
-Some samples in this repositories may have special deployment instructions. Refer to the readme in the sample directory.
-
-1. Use the [Google Developers Console](https://console.developer.google.com) to create a project/app id. (App id and project id are identical)
-
-2. Setup the gcloud tool, if you haven't already.
-
- ```
- gcloud init
- ```
-
-3. Use gcloud to deploy your app.
-
- ```
- gcloud app deploy
- ```
-
-4. Congratulations! Your application is now live at `your-app-id.appspot.com`
-
-## Contributing changes
-
-* See [CONTRIBUTING.md](../../CONTRIBUTING.md)
-
-## Licensing
-
-* See [LICENSE](../../LICENSE)
diff --git a/appengine/flexible_python37_and_earlier/analytics/README.md b/appengine/flexible_python37_and_earlier/analytics/README.md
deleted file mode 100644
index d4fa88bef8b..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Google Analytics Measurement Protocol sample for Google App Engine Flexible
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/analytics/README.md
-
-This sample demonstrates how to use the [Google Analytics Measurement Protocol](https://developers.google.com/analytics/devguides/collection/protocol/v1/) (or any other SQL server) on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Setup
-
-Before you can run or deploy the sample, you will need to do the following:
-
-1. Create a Google Analytics Property and obtain the Tracking ID.
-
-2. Update the environment variables in in ``app.yaml`` with your Tracking ID.
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-You will need to set the following environment variables via your shell before running the sample:
-
- $ export GA_TRACKING_ID=[your Tracking ID]
- $ python main.py
diff --git a/appengine/flexible_python37_and_earlier/analytics/app.yaml b/appengine/flexible_python37_and_earlier/analytics/app.yaml
deleted file mode 100644
index 0f5590d7058..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/app.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-#[START gae_flex_analytics_env_variables]
-env_variables:
- GA_TRACKING_ID: your-tracking-id
-#[END gae_flex_analytics_env_variables]
diff --git a/appengine/flexible_python37_and_earlier/analytics/main.py b/appengine/flexible_python37_and_earlier/analytics/main.py
deleted file mode 100644
index c07ab9b4703..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/main.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [START gae_flex_analytics_track_event]
-import logging
-import os
-
-from flask import Flask
-import requests
-
-
-app = Flask(__name__)
-
-
-# Environment variables are defined in app.yaml.
-GA_TRACKING_ID = os.environ["GA_TRACKING_ID"]
-
-
-def track_event(category, action, label=None, value=0):
- data = {
- "v": "1", # API Version.
- "tid": GA_TRACKING_ID, # Tracking ID / Property ID.
- # Anonymous Client Identifier. Ideally, this should be a UUID that
- # is associated with particular user, device, or browser instance.
- "cid": "555",
- "t": "event", # Event hit type.
- "ec": category, # Event category.
- "ea": action, # Event action.
- "el": label, # Event label.
- "ev": value, # Event value, must be an integer
- "ua": "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14",
- }
-
- response = requests.post("https://www.google-analytics.com/collect", data=data)
-
- # If the request fails, this will raise a RequestException. Depending
- # on your application's needs, this may be a non-error and can be caught
- # by the caller.
- response.raise_for_status()
-
-
-@app.route("/")
-def track_example():
- track_event(category="Example", action="test action")
- return "Event tracked."
-
-
-@app.errorhandler(500)
-def server_error(e):
- logging.exception("An error occurred during a request.")
- return (
- """
- An internal error occurred:
{}
- See logs for full stacktrace.
- """.format(
- e
- ),
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
-# [END gae_flex_analytics_track_event]
diff --git a/appengine/flexible_python37_and_earlier/analytics/main_test.py b/appengine/flexible_python37_and_earlier/analytics/main_test.py
deleted file mode 100644
index 02914bda79d..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/main_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-import pytest
-import responses
-
-
-@pytest.fixture
-def app(monkeypatch):
- monkeypatch.setenv("GA_TRACKING_ID", "1234")
-
- import main
-
- main.app.testing = True
- return main.app.test_client()
-
-
-@responses.activate
-def test_tracking(app):
- responses.add(
- responses.POST, re.compile(r".*"), body="{}", content_type="application/json"
- )
-
- r = app.get("/")
-
- assert r.status_code == 200
- assert "Event tracked" in r.data.decode("utf-8")
-
- assert len(responses.calls) == 1
- request_body = responses.calls[0].request.body
- assert "tid=1234" in request_body
- assert "ea=test+action" in request_body
diff --git a/appengine/flexible_python37_and_earlier/analytics/requirements-test.txt b/appengine/flexible_python37_and_earlier/analytics/requirements-test.txt
deleted file mode 100644
index e89f6031ad7..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/requirements-test.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest==8.2.0
-responses==0.17.0; python_version < '3.7'
-responses==0.23.1; python_version > '3.6'
diff --git a/appengine/flexible_python37_and_earlier/analytics/requirements.txt b/appengine/flexible_python37_and_earlier/analytics/requirements.txt
deleted file mode 100644
index 9bfb6dcc546..00000000000
--- a/appengine/flexible_python37_and_earlier/analytics/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-gunicorn==23.0.0
-requests[security]==2.31.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/datastore/README.md b/appengine/flexible_python37_and_earlier/datastore/README.md
deleted file mode 100644
index 5676c53aab9..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# Python Google Cloud Datastore sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/datastore/README.md
-
-This sample demonstrates how to use [Google Cloud Datastore](https://cloud.google.com/datastore/) on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Setup
-
-Before you can run or deploy the sample, you will need to enable the Cloud Datastore API in the [Google Developers Console](https://console.developers.google.com/project/_/apiui/apiview/datastore/overview).
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-When running locally, you can use the [Google Cloud SDK](https://cloud.google.com/sdk) to provide authentication to use Google Cloud APIs:
-
- $ gcloud init
-
-Starting your application:
-
- $ python main.py
diff --git a/appengine/flexible_python37_and_earlier/datastore/main.py b/appengine/flexible_python37_and_earlier/datastore/main.py
deleted file mode 100644
index ac1cec4ee5b..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/main.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import logging
-import socket
-
-from flask import Flask, request
-from google.cloud import datastore
-
-
-app = Flask(__name__)
-
-
-def is_ipv6(addr):
- """Checks if a given address is an IPv6 address."""
- try:
- socket.inet_pton(socket.AF_INET6, addr)
- return True
- except OSError:
- return False
-
-
-# [START gae_flex_datastore_app]
-@app.route("/")
-def index():
- ds = datastore.Client()
-
- user_ip = request.remote_addr
-
- # Keep only the first two octets of the IP address.
- if is_ipv6(user_ip):
- user_ip = ":".join(user_ip.split(":")[:2])
- else:
- user_ip = ".".join(user_ip.split(".")[:2])
-
- entity = datastore.Entity(key=ds.key("visit"))
- entity.update(
- {
- "user_ip": user_ip,
- "timestamp": datetime.datetime.now(tz=datetime.timezone.utc),
- }
- )
-
- ds.put(entity)
- query = ds.query(kind="visit", order=("-timestamp",))
-
- results = []
- for x in query.fetch(limit=10):
- try:
- results.append("Time: {timestamp} Addr: {user_ip}".format(**x))
- except KeyError:
- print("Error with result format, skipping entry.")
-
- output = "Last 10 visits:\n{}".format("\n".join(results))
-
- return output, 200, {"Content-Type": "text/plain; charset=utf-8"}
-
-
-# [END gae_flex_datastore_app]
-
-
-@app.errorhandler(500)
-def server_error(e):
- logging.exception("An error occurred during a request.")
- return (
- """
- An internal error occurred:
{}
- See logs for full stacktrace.
- """.format(
- e
- ),
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/datastore/main_test.py b/appengine/flexible_python37_and_earlier/datastore/main_test.py
deleted file mode 100644
index 6b17c44ca79..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/main_test.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import main
-
-
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
-
- r = client.get("/", environ_base={"REMOTE_ADDR": "127.0.0.1"})
- assert r.status_code == 200
- assert "Last 10 visits" in r.data.decode("utf-8")
diff --git a/appengine/flexible_python37_and_earlier/datastore/noxfile_config.py b/appengine/flexible_python37_and_earlier/datastore/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/datastore/requirements-test.txt b/appengine/flexible_python37_and_earlier/datastore/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/datastore/requirements.txt b/appengine/flexible_python37_and_earlier/datastore/requirements.txt
deleted file mode 100644
index ff3c9dcce0c..00000000000
--- a/appengine/flexible_python37_and_earlier/datastore/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-google-cloud-datastore==2.20.2
-gunicorn==23.0.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/README.md b/appengine/flexible_python37_and_earlier/django_cloudsql/README.md
deleted file mode 100644
index 60e3ff2f5e7..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Getting started with Django on Google Cloud Platform on App Engine Flexible
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/django_cloudsql/README.md
-
-This repository is an example of how to run a [Django](https://www.djangoproject.com/)
-app on Google App Engine Flexible Environment. It uses the
-[Writing your first Django app](https://docs.djangoproject.com/en/stable/intro/tutorial01/) as the
-example app to deploy.
-
-
-# Tutorial
-See our [Running Django in the App Engine Flexible Environment](https://cloud.google.com/python/django/flexible-environment) tutorial for instructions for setting up and deploying this sample application.
-
-
-## Contributing changes
-
-* See [CONTRIBUTING.md](CONTRIBUTING.md)
-
-
-## Licensing
-
-* See [LICENSE](LICENSE)
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/app.yaml b/appengine/flexible_python37_and_earlier/django_cloudsql/app.yaml
deleted file mode 100644
index 7fcf498d62e..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/app.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# [START gaeflex_py_django_app_yaml]
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT mysite.wsgi
-
-beta_settings:
- cloud_sql_instances: PROJECT_ID:REGION:INSTANCE_NAME
-
-runtime_config:
- python_version: 3.7
-# [END gaeflex_py_django_app_yaml]
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/settings.py b/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/settings.py
deleted file mode 100644
index ab4d8e7d5e1..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/settings.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import io
-import os
-
-import environ
-from google.cloud import secretmanager
-
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-
-# [START gaeflex_py_django_secret_config]
-env = environ.Env(DEBUG=(bool, False))
-env_file = os.path.join(BASE_DIR, ".env")
-
-if os.path.isfile(env_file):
- # Use a local secret file, if provided
-
- env.read_env(env_file)
-# [START_EXCLUDE]
-elif os.getenv("TRAMPOLINE_CI", None):
- # Create local settings if running with CI, for unit testing
-
- placeholder = (
- f"SECRET_KEY=a\n"
- "GS_BUCKET_NAME=None\n"
- f"DATABASE_URL=sqlite://{os.path.join(BASE_DIR, 'db.sqlite3')}"
- )
- env.read_env(io.StringIO(placeholder))
-# [END_EXCLUDE]
-elif os.environ.get("GOOGLE_CLOUD_PROJECT", None):
- # Pull secrets from Secret Manager
- project_id = os.environ.get("GOOGLE_CLOUD_PROJECT")
-
- client = secretmanager.SecretManagerServiceClient()
- settings_name = os.environ.get("SETTINGS_NAME", "django_settings")
- name = f"projects/{project_id}/secrets/{settings_name}/versions/latest"
- payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
-
- env.read_env(io.StringIO(payload))
-else:
- raise Exception("No local .env or GOOGLE_CLOUD_PROJECT detected. No secrets found.")
-# [END gaeflex_py_django_secret_config]
-
-SECRET_KEY = env("SECRET_KEY")
-
-# SECURITY WARNING: don't run with debug turned on in production!
-# Change this to "False" when you are ready for production
-DEBUG = env("DEBUG")
-
-# SECURITY WARNING: App Engine's security features ensure that it is safe to
-# have ALLOWED_HOSTS = ['*'] when the app is deployed. If you deploy a Django
-# app not on App Engine, make sure to set an appropriate host here.
-ALLOWED_HOSTS = ["*"]
-
-# Application definition
-
-INSTALLED_APPS = (
- "django.contrib.admin",
- "django.contrib.auth",
- "django.contrib.contenttypes",
- "django.contrib.sessions",
- "django.contrib.messages",
- "django.contrib.staticfiles",
- "polls",
-)
-
-MIDDLEWARE = (
- "django.middleware.security.SecurityMiddleware",
- "django.contrib.sessions.middleware.SessionMiddleware",
- "django.middleware.common.CommonMiddleware",
- "django.middleware.csrf.CsrfViewMiddleware",
- "django.contrib.auth.middleware.AuthenticationMiddleware",
- "django.contrib.messages.middleware.MessageMiddleware",
- "django.middleware.clickjacking.XFrameOptionsMiddleware",
-)
-
-ROOT_URLCONF = "mysite.urls"
-
-TEMPLATES = [
- {
- "BACKEND": "django.template.backends.django.DjangoTemplates",
- "DIRS": [],
- "APP_DIRS": True,
- "OPTIONS": {
- "context_processors": [
- "django.template.context_processors.debug",
- "django.template.context_processors.request",
- "django.contrib.auth.context_processors.auth",
- "django.contrib.messages.context_processors.messages",
- ],
- },
- },
-]
-
-WSGI_APPLICATION = "mysite.wsgi.application"
-
-# Database
-
-# [START gaeflex_py_django_database_config]
-# Use django-environ to parse the connection string
-DATABASES = {"default": env.db()}
-
-# If the flag as been set, configure to use proxy
-if os.getenv("USE_CLOUD_SQL_AUTH_PROXY", None):
- DATABASES["default"]["HOST"] = "127.0.0.1"
- DATABASES["default"]["PORT"] = 5432
-
-# [END gaeflex_py_django_database_config]
-
-# Use a in-memory sqlite3 database when testing in CI systems
-if os.getenv("TRAMPOLINE_CI", None):
- DATABASES = {
- "default": {
- "ENGINE": "django.db.backends.sqlite3",
- "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
- }
- }
-
-
-# Password validation
-
-AUTH_PASSWORD_VALIDATORS = [
- {
- "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa: 501
- },
- {
- "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # noqa: 501
- },
- {
- "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # noqa: 501
- },
- {
- "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", # noqa: 501
- },
-]
-
-# Internationalization
-# https://docs.djangoproject.com/en/stable/topics/i18n/
-
-LANGUAGE_CODE = "en-us"
-
-TIME_ZONE = "UTC"
-
-USE_I18N = True
-
-USE_L10N = True
-
-USE_TZ = True
-
-# Static files (CSS, JavaScript, Images)
-# [START gaeflex_py_django_static_config]
-# Define static storage via django-storages[google]
-GS_BUCKET_NAME = env("GS_BUCKET_NAME")
-STATIC_URL = "/static/"
-DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
-STATICFILES_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
-GS_DEFAULT_ACL = "publicRead"
-# [END gaeflex_py_django_static_config]
-
-# Default primary key field type
-# https://docs.djangoproject.com/en/stable/ref/settings/#default-auto-field
-
-DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/urls.py b/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/urls.py
deleted file mode 100644
index 62e72564fc2..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/urls.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [START gaeflex_py_django_local_static]
-from django.conf import settings
-from django.conf.urls.static import static
-from django.contrib import admin
-from django.urls import include, path
-
-urlpatterns = [
- path("", include("polls.urls")),
- path("admin/", admin.site.urls),
-] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
-# [END gaeflex_py_django_local_static]
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/noxfile_config.py b/appengine/flexible_python37_and_earlier/django_cloudsql/noxfile_config.py
deleted file mode 100644
index a51f3680ad6..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {"DJANGO_SETTINGS_MODULE": "mysite.settings"},
-}
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/models.py b/appengine/flexible_python37_and_earlier/django_cloudsql/polls/models.py
deleted file mode 100644
index 5d2bf302721..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/models.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.db import models
-
-
-class Question(models.Model):
- question_text = models.CharField(max_length=200)
- pub_date = models.DateTimeField("date published")
-
-
-class Choice(models.Model):
- question = models.ForeignKey(Question, on_delete=models.CASCADE)
- choice_text = models.CharField(max_length=200)
- votes = models.IntegerField(default=0)
-
-
-# Create your models here.
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/test_polls.py b/appengine/flexible_python37_and_earlier/django_cloudsql/polls/test_polls.py
deleted file mode 100644
index 3ce4c624bbb..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/test_polls.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2020 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.test import Client, TestCase # noqa: 401
-
-
-class PollViewTests(TestCase):
- def test_index_view(self):
- response = self.client.get("/")
- assert response.status_code == 200
- assert "Hello, world" in str(response.content)
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/urls.py b/appengine/flexible_python37_and_earlier/django_cloudsql/polls/urls.py
deleted file mode 100644
index ca52d749043..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/urls.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.urls import path
-
-from . import views
-
-urlpatterns = [
- path("", views.index, name="index"),
-]
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/requirements-test.txt b/appengine/flexible_python37_and_earlier/django_cloudsql/requirements-test.txt
deleted file mode 100644
index 5e5d2c73a81..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/requirements-test.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pytest==8.2.0
-pytest-django==4.9.0
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/requirements.txt b/appengine/flexible_python37_and_earlier/django_cloudsql/requirements.txt
deleted file mode 100644
index 1cca009774e..00000000000
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Django==5.2.3
-gunicorn==23.0.0
-psycopg2-binary==2.9.10
-django-environ==0.12.0
-google-cloud-secret-manager==2.21.1
-django-storages[google]==1.14.5
diff --git a/appengine/flexible_python37_and_earlier/hello_world/app.yaml b/appengine/flexible_python37_and_earlier/hello_world/app.yaml
deleted file mode 100644
index 7aa7a47e159..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world/app.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-# This sample incurs costs to run on the App Engine flexible environment.
-# The settings below are to reduce costs during testing and are not appropriate
-# for production use. For more information, see:
-# https://cloud.google.com/appengine/docs/flexible/python/configuring-your-app-with-app-yaml
-manual_scaling:
- instances: 1
-resources:
- cpu: 1
- memory_gb: 0.5
- disk_size_gb: 10
diff --git a/appengine/flexible_python37_and_earlier/hello_world/main.py b/appengine/flexible_python37_and_earlier/hello_world/main.py
deleted file mode 100644
index eba195ed4fd..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world/main.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [START gae_flex_quickstart]
-from flask import Flask
-
-app = Flask(__name__)
-
-
-@app.route("/")
-def hello():
- """Return a friendly HTTP greeting.
-
- Returns:
- A string with the words 'Hello World!'.
- """
- return "Hello World!"
-
-
-if __name__ == "__main__":
- # This is used when running locally only. When deploying to Google App
- # Engine, a webserver process such as Gunicorn will serve the app.
- app.run(host="127.0.0.1", port=8080, debug=True)
-# [END gae_flex_quickstart]
diff --git a/appengine/flexible_python37_and_earlier/hello_world/requirements-test.txt b/appengine/flexible_python37_and_earlier/hello_world/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/hello_world/requirements.txt b/appengine/flexible_python37_and_earlier/hello_world/requirements.txt
deleted file mode 100644
index 055e4c6a13d..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==3.0.3; python_version < '3.7'
-gunicorn==23.0.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/.gitignore b/appengine/flexible_python37_and_earlier/hello_world_django/.gitignore
deleted file mode 100644
index 49ef2557b16..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-db.sqlite3
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/README.md b/appengine/flexible_python37_and_earlier/hello_world_django/README.md
deleted file mode 100644
index d6705b131a3..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/README.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Django sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/hello_world_django/README.md
-
-This is a basic hello world [Django](https://www.djangoproject.com/) example
-for [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Running locally
-
-You can run locally using django's `manage.py`:
-
- $ python manage.py runserver
-
-## Deployment & how the application runs on Google App Engine.
-
-Follow the standard deployment instructions in
-[the top-level README](../README.md). Google App Engine runs the application
-using [gunicorn](http://gunicorn.org/) as defined by `entrypoint` in
-[`app.yaml`](app.yaml). You can use a different WSGI container if you want, as
-long as it listens for web traffic on port `$PORT` and is declared in
-[`requirements.txt`](requirements.txt).
-
-## How this was created
-
-To set up Python development environment, please follow
-https://cloud.google.com/python/docs/setup.
-
-This project was created using standard Django commands:
-
- $ virtualenv env
- $ source env/bin/activate
- $ pip install django gunicorn
- $ pip freeze > requirements.txt
- $ django-admin startproject project_name
- $ python manage.py startapp helloworld
-
-Then, we added a simple view in `hellworld.views`, added the app to
-`project_name.settings.INSTALLED_APPS`, and finally added a URL rule to
-`project_name.urls`.
-
-In order to deploy to Google App Engine, we created a simple
-[`app.yaml`](app.yaml).
-
-## Database notice
-
-This sample project uses Django's default sqlite database. This isn't suitable
-for production as your application can run multiple instances and each will
-have a different sqlite database. Additionally, instance disks are ephemeral,
-so data will not survive restarts.
-
-For production applications running on Google Cloud Platform, you have
-the following options:
-
-* Use [Cloud SQL](https://cloud.google.com/sql), a fully-managed MySQL database.
- There is a [Flask CloudSQL](../cloudsql) sample that should be straightforward
- to adapt to Django.
-* Use any database of your choice hosted on
- [Google Compute Engine](https://cloud.google.com/compute). The
- [Cloud Launcher](https://cloud.google.com/launcher/) can be used to easily
- deploy common databases.
-* Use third-party database services, or services hosted by other providers,
- provided you have configured access.
-
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/manage.py b/appengine/flexible_python37_and_earlier/hello_world_django/manage.py
deleted file mode 100755
index c213c77eca6..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/manage.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-if __name__ == "__main__":
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project_name.settings")
-
- from django.core.management import execute_from_command_line
-
- execute_from_command_line(sys.argv)
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/__init__.py b/appengine/flexible_python37_and_earlier/hello_world_django/project_name/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/settings.py b/appengine/flexible_python37_and_earlier/hello_world_django/project_name/settings.py
deleted file mode 100644
index f8b93099d56..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/settings.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Django settings for project_name project.
-
-Generated by 'django-admin startproject' using Django 1.8.4.
-
-For more information on this file, see
-https://docs.djangoproject.com/en/stable/topics/settings/
-
-For the full list of settings and their values, see
-https://docs.djangoproject.com/en/stable/ref/settings/
-"""
-
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-import os
-
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-
-
-# Quick-start development settings - unsuitable for production
-# See https://docs.djangoproject.com/en/stable/howto/deployment/checklist/
-
-# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = "qgw!j*bpxo7g&o1ux-(2ph818ojfj(3c#-#*_8r^8&hq5jg$3@"
-
-# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = True
-
-ALLOWED_HOSTS = []
-
-
-# Application definition
-
-INSTALLED_APPS = (
- "django.contrib.admin",
- "django.contrib.auth",
- "django.contrib.contenttypes",
- "django.contrib.sessions",
- "django.contrib.messages",
- "django.contrib.staticfiles",
- "helloworld",
-)
-
-MIDDLEWARE = (
- "django.middleware.security.SecurityMiddleware",
- "django.contrib.sessions.middleware.SessionMiddleware",
- "django.middleware.common.CommonMiddleware",
- "django.middleware.csrf.CsrfViewMiddleware",
- "django.contrib.auth.middleware.AuthenticationMiddleware",
- "django.contrib.messages.middleware.MessageMiddleware",
- "django.middleware.clickjacking.XFrameOptionsMiddleware",
-)
-
-ROOT_URLCONF = "project_name.urls"
-
-TEMPLATES = [
- {
- "BACKEND": "django.template.backends.django.DjangoTemplates",
- "DIRS": [],
- "APP_DIRS": True,
- "OPTIONS": {
- "context_processors": [
- "django.template.context_processors.debug",
- "django.template.context_processors.request",
- "django.contrib.auth.context_processors.auth",
- "django.contrib.messages.context_processors.messages",
- ],
- },
- },
-]
-
-WSGI_APPLICATION = "project_name.wsgi.application"
-
-
-# Database
-# https://docs.djangoproject.com/en/stable/ref/settings/#databases
-
-DATABASES = {
- "default": {
- "ENGINE": "django.db.backends.sqlite3",
- "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
- }
-}
-
-
-# Internationalization
-# https://docs.djangoproject.com/en/stable/topics/i18n/
-
-LANGUAGE_CODE = "en-us"
-
-TIME_ZONE = "UTC"
-
-USE_I18N = True
-
-USE_L10N = True
-
-USE_TZ = True
-
-
-# Static files (CSS, JavaScript, Images)
-# https://docs.djangoproject.com/en/stable/howto/static-files/
-
-STATIC_URL = "/static/"
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/urls.py b/appengine/flexible_python37_and_earlier/hello_world_django/project_name/urls.py
deleted file mode 100644
index 9a393bb42d2..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/urls.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.contrib import admin
-from django.urls import include, path
-
-import helloworld.views
-
-
-urlpatterns = [
- path("admin/", include(admin.site.urls)),
- path("", helloworld.views.index),
-]
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/wsgi.py b/appengine/flexible_python37_and_earlier/hello_world_django/project_name/wsgi.py
deleted file mode 100644
index c069a496999..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/project_name/wsgi.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-WSGI config for project_name project.
-
-It exposes the WSGI callable as a module-level variable named ``application``.
-
-For more information on this file, see
-https://docs.djangoproject.com/en/stable/howto/deployment/wsgi/
-"""
-
-import os
-
-from django.core.wsgi import get_wsgi_application
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project_name.settings")
-
-application = get_wsgi_application()
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/requirements-test.txt b/appengine/flexible_python37_and_earlier/hello_world_django/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/requirements.txt b/appengine/flexible_python37_and_earlier/hello_world_django/requirements.txt
deleted file mode 100644
index b1ec55c859c..00000000000
--- a/appengine/flexible_python37_and_earlier/hello_world_django/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Django==5.2.3
-gunicorn==23.0.0
diff --git a/appengine/flexible_python37_and_earlier/metadata/main.py b/appengine/flexible_python37_and_earlier/metadata/main.py
deleted file mode 100644
index 9d1e320865a..00000000000
--- a/appengine/flexible_python37_and_earlier/metadata/main.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from flask import Flask
-import requests
-
-
-logging.basicConfig(level=logging.INFO)
-app = Flask(__name__)
-
-
-# [START gae_flex_metadata]
-METADATA_NETWORK_INTERFACE_URL = (
- "http://metadata/computeMetadata/v1/instance/network-interfaces/0/"
- "access-configs/0/external-ip"
-)
-
-
-def get_external_ip():
- """Gets the instance's external IP address from the Compute Engine metadata
- server.
-
- If the metadata server is unavailable, it assumes that the application is running locally.
-
- Returns:
- The instance's external IP address, or the string 'localhost' if the IP address
- is not available.
- """
- try:
- r = requests.get(
- METADATA_NETWORK_INTERFACE_URL,
- headers={"Metadata-Flavor": "Google"},
- timeout=2,
- )
- return r.text
- except requests.RequestException:
- logging.info("Metadata server could not be reached, assuming local.")
- return "localhost"
-
-
-# [END gae_flex_metadata]
-
-
-@app.route("/")
-def index():
- """Serves a string with the instance's external IP address.
-
- Websocket connections must be made directly to this instance.
-
- Returns:
- A formatted string containing the instance's external IP address.
- """
- external_ip = get_external_ip()
- return f"External IP: {external_ip}"
-
-
-@app.errorhandler(500)
-def server_error(e):
- """Serves a formatted message on-error.
-
- Returns:
- The error message and a code 500 status.
- """
- logging.exception("An error occurred during a request.")
- return (
-        f"An internal error occurred:\n{e}\nSee logs for full stacktrace.",
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/metadata/main_test.py b/appengine/flexible_python37_and_earlier/metadata/main_test.py
deleted file mode 100644
index 55d345d170d..00000000000
--- a/appengine/flexible_python37_and_earlier/metadata/main_test.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2023 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import main
-
-
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
-
- external_ip = main.get_external_ip()
-
- r = client.get("/")
- assert r.status_code == 200
- assert f"External IP: {external_ip}" in r.data.decode("utf-8")
diff --git a/appengine/flexible_python37_and_earlier/metadata/noxfile_config.py b/appengine/flexible_python37_and_earlier/metadata/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/metadata/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/metadata/requirements-test.txt b/appengine/flexible_python37_and_earlier/metadata/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/metadata/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/metadata/requirements.txt b/appengine/flexible_python37_and_earlier/metadata/requirements.txt
deleted file mode 100644
index 9bfb6dcc546..00000000000
--- a/appengine/flexible_python37_and_earlier/metadata/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-gunicorn==23.0.0
-requests[security]==2.31.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/README.md b/appengine/flexible_python37_and_earlier/multiple_services/README.md
deleted file mode 100644
index 1e300dd8e00..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/README.md
+++ /dev/null
@@ -1,63 +0,0 @@
-# Python Google Cloud Microservices Example - API Gateway
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/multiple_services/README.md
-
-This example demonstrates how to deploy multiple python services to [App Engine flexible environment](https://cloud.google.com/appengine/docs/flexible/)
-
-## To Run Locally
-
-Open a terminal and start the first service:
-
-```Bash
-$ cd gateway-service
-$ # follow https://cloud.google.com/python/docs/setup to set up a Python
-development environment
-$ pip install -r requirements.txt
-$ python main.py
-```
-
-In a separate terminal, start the second service:
-
-```Bash
-$ cd static-service
-$ # follow https://cloud.google.com/python/docs/setup to set up a Python
-$ pip install -r requirements.txt
-$ python main.py
-```
-
-## To Deploy to App Engine
-
-### YAML Files
-
-Each directory contains an `app.yaml` file. These files all describe a
-separate App Engine service within the same project.
-
-For the gateway:
-
-[Gateway service ](gateway/app.yaml)
-
-This is the `default` service. There must be one (and not more). The deployed
-url will be `https://<your project id>.appspot.com`
-
-For the static file server:
-
-[Static file service ](static/app.yaml)
-
-The deployed url will be `https://<service name>-dot-<your project id>.appspot.com`
-
-### Deployment
-
-To deploy a service cd into its directory and run:
-```Bash
-$ gcloud app deploy app.yaml
-```
-and enter `Y` when prompted. Or to skip the check add `-q`.
-
-To deploy multiple services simultaneously just add the path to each `app.yaml`
-file as an argument to `gcloud app deploy `:
-```Bash
-$ gcloud app deploy gateway-service/app.yaml static-service/app.yaml
-```
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/app.yaml b/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/app.yaml
deleted file mode 100644
index fde45fada1c..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/app.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-service: default
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-manual_scaling:
- instances: 1
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/main.py b/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/main.py
deleted file mode 100644
index f963995ae0b..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/main.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from flask import Flask
-import requests
-
-import services_config
-
-app = Flask(__name__)
-services_config.init_app(app)
-
-
-@app.route("/")
-def root():
- """Gets index.html from the static file server"""
- res = requests.get(app.config["SERVICE_MAP"]["static"])
- return res.content
-
-
-@app.route("/hello/<service>")
-def say_hello(service):
- """Recieves requests from buttons on the front end and resopnds
- or sends request to the static file server"""
- # If 'gateway' is specified return immediate
- if service == "gateway":
- return "Gateway says hello"
-
- # Otherwise send request to service indicated by URL param
- responses = []
- url = app.config["SERVICE_MAP"][service]
- res = requests.get(url + "/hello")
- responses.append(res.content)
- return b"\n".join(responses)
-
-
-@app.route("/<path:path>")
-def static_file(path):
- """Gets static files required by index.html to static file server"""
- url = app.config["SERVICE_MAP"]["static"]
- res = requests.get(url + "/" + path)
- return res.content, 200, {"Content-Type": res.headers["Content-Type"]}
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8000, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements-test.txt b/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements.txt b/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements.txt
deleted file mode 100644
index 052021ed812..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-gunicorn==23.0.0
-requests==2.31.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/services_config.py b/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/services_config.py
deleted file mode 100644
index 429ed402e03..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/gateway-service/services_config.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-# To add services insert key value pair of the name of the service and
-# the port you want it to run on when running locally
-SERVICES = {"default": 8000, "static": 8001}
-
-
-def init_app(app):
- # The GAE_INSTANCE environment variable will be set when deployed to GAE.
- gae_instance = os.environ.get("GAE_INSTANCE", os.environ.get("GAE_MODULE_INSTANCE"))
- environment = "production" if gae_instance is not None else "development"
- app.config["SERVICE_MAP"] = map_services(environment)
-
-
-def map_services(environment):
- """Generates a map of services to correct urls for running locally
- or when deployed."""
- url_map = {}
- for service, local_port in SERVICES.items():
- if environment == "production":
- url_map[service] = production_url(service)
- if environment == "development":
- url_map[service] = local_url(local_port)
- return url_map
-
-
-def production_url(service_name):
- """Generates url for a service when deployed to App Engine."""
- project_id = os.getenv("GOOGLE_CLOUD_PROJECT")
- project_url = f"{project_id}.appspot.com"
- if service_name == "default":
- return f"https://{project_url}"
- else:
- return f"https://{service_name}-dot-{project_url}"
-
-
-def local_url(port):
- """Generates url for a service when running locally"""
- return f"http://localhost:{str(port)}"
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/noxfile_config.py b/appengine/flexible_python37_and_earlier/multiple_services/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/app.yaml b/appengine/flexible_python37_and_earlier/multiple_services/static-service/app.yaml
deleted file mode 100644
index 0583df96c7e..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/app.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-service: static
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-manual_scaling:
- instances: 1
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/main.py b/appengine/flexible_python37_and_earlier/multiple_services/static-service/main.py
deleted file mode 100644
index c4b3a8d4799..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/main.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from flask import Flask
-
-app = Flask(__name__)
-
-
-@app.route("/hello")
-def say_hello():
- """responds to request from frontend via gateway"""
- return "Static File Server says hello!"
-
-
-@app.route("/")
-def root():
- """serves index.html"""
- return app.send_static_file("index.html")
-
-
-@app.route("/")
-def static_file(path):
- """serves static files required by index.html"""
- mimetype = ""
- if "." in path and path.split(".")[1] == "css":
- mimetype = "text/css"
- if "." in path and path.split(".")[1] == "js":
- mimetype = "application/javascript"
- return app.send_static_file(path), 200, {"Content-Type": mimetype}
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8001, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements-test.txt b/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements.txt b/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements.txt
deleted file mode 100644
index 052021ed812..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-gunicorn==23.0.0
-requests==2.31.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.html b/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.html
deleted file mode 100644
index 9310b700113..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.html
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-
-
-
-
-
-
- API Gateway on App Engine Flexible Environment
-
-
-
API GATEWAY DEMO
-
Say hi to:
-
-
-
-
-
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.js b/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.js
deleted file mode 100644
index 021f835b9c1..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/index.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2016 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-function handleResponse(resp){
- const li = document.createElement('li');
- li.innerHTML = resp;
- document.querySelector('.responses').appendChild(li)
-}
-
-function handleClick(event){
- $.ajax({
- url: `hello/${event.target.id}`,
- type: `GET`,
- success(resp){
- handleResponse(resp);
- }
- });
-}
-
-document.addEventListener('DOMContentLoaded', () => {
- const buttons = document.getElementsByTagName('button')
- for (var i = 0; i < buttons.length; i++) {
- buttons[i].addEventListener('click', handleClick);
- }
-});
diff --git a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/style.css b/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/style.css
deleted file mode 100644
index 65074a9ef4d..00000000000
--- a/appengine/flexible_python37_and_earlier/multiple_services/static-service/static/style.css
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Copyright 2021 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-h1 {
- color: red;
-}
diff --git a/appengine/flexible_python37_and_earlier/numpy/app.yaml b/appengine/flexible_python37_and_earlier/numpy/app.yaml
deleted file mode 100644
index ca76f83fc3b..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/app.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
diff --git a/appengine/flexible_python37_and_earlier/numpy/main.py b/appengine/flexible_python37_and_earlier/numpy/main.py
deleted file mode 100644
index cb14c931d62..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/main.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from flask import Flask
-import numpy as np
-
-app = Flask(__name__)
-
-
-@app.route("/")
-def calculate():
- """Performs a dot product on predefined arrays.
-
- Returns:
- Returns a formatted message containing the dot product result of
- two predefined arrays.
- """
- return_str = ""
- x = np.array([[1, 2], [3, 4]])
- y = np.array([[5, 6], [7, 8]])
-
- return_str += f"x: {str(x)} , y: {str(y)} "
-
- # Multiply matrices
- return_str += f"x dot y : {str(np.dot(x, y))}"
- return return_str
-
-
-@app.errorhandler(500)
-def server_error(e):
- """Serves a formatted message on-error.
-
- Returns:
- The error message and a code 500 status.
- """
- logging.exception("An error occurred during a request.")
- return (
- f"An internal error occurred:
{e}
See logs for full stacktrace.",
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/numpy/main_test.py b/appengine/flexible_python37_and_earlier/numpy/main_test.py
deleted file mode 100644
index e25c4dfcac3..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/main_test.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import main
-
-
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
-
- r = client.get("/")
- assert r.status_code == 200
- assert "[[19 22]\n [43 50]]" in r.data.decode("utf-8")
diff --git a/appengine/flexible_python37_and_earlier/numpy/noxfile_config.py b/appengine/flexible_python37_and_earlier/numpy/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/numpy/requirements-test.txt b/appengine/flexible_python37_and_earlier/numpy/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/numpy/requirements.txt b/appengine/flexible_python37_and_earlier/numpy/requirements.txt
deleted file mode 100644
index ccd96a3d6d1..00000000000
--- a/appengine/flexible_python37_and_earlier/numpy/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-gunicorn==23.0.0
-numpy==2.2.4; python_version > '3.9'
-numpy==2.2.4; python_version == '3.9'
-numpy==2.2.4; python_version == '3.8'
-numpy==2.2.4; python_version == '3.7'
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/pubsub/README.md b/appengine/flexible_python37_and_earlier/pubsub/README.md
deleted file mode 100644
index 2e9b0d71918..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# Python Google Cloud Pub/Sub sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/pubsub/README.md
-
-This demonstrates how to send and receive messages using [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Setup
-
-Before you can run or deploy the sample, you will need to do the following:
-
-1. Enable the Cloud Pub/Sub API in the [Google Developers Console](https://console.developers.google.com/project/_/apiui/apiview/pubsub/overview).
-
-2. Create a topic and subscription.
-
- $ gcloud beta pubsub topics create [your-topic-name]
- $ gcloud beta pubsub subscriptions create [your-subscription-name] \
- --topic [your-topic-name] \
- --push-endpoint \
- https://[your-app-id].appspot.com/pubsub/push?token=[your-token] \
- --ack-deadline 30
-
-3. Update the environment variables in ``app.yaml``.
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-When running locally, you can use the [Google Cloud SDK](https://cloud.google.com/sdk) to provide authentication to use Google Cloud APIs:
-
- $ gcloud init
-
-Install dependencies, please follow https://cloud.google.com/python/docs/setup
-to set up a Python development environment. Then run:
-
- $ pip install -r requirements.txt
-
-Then set environment variables before starting your application:
-
- $ export PUBSUB_VERIFICATION_TOKEN=[your-verification-token]
- $ export PUBSUB_TOPIC=[your-topic]
- $ python main.py
-
-### Simulating push notifications
-
-The application can send messages locally, but it is not able to receive push messages locally. You can, however, simulate a push message by making an HTTP request to the local push notification endpoint. There is an included ``sample_message.json``. You can use
-``curl`` or [httpie](https://github.com/jkbrzt/httpie) to POST this:
-
- $ curl -i --data @sample_message.json ":8080/pubsub/push?token=[your-token]"
-
-Or
-
- $ http POST ":8080/pubsub/push?token=[your-token]" < sample_message.json
-
-Response:
-
- HTTP/1.0 200 OK
- Content-Length: 2
- Content-Type: text/html; charset=utf-8
- Date: Mon, 10 Aug 2015 17:52:03 GMT
- Server: Werkzeug/0.10.4 Python/2.7.10
-
- OK
-
-After the request completes, you can refresh ``localhost:8080`` and see the message in the list of received messages.
-
-## Running on App Engine
-
-Deploy using `gcloud`:
-
- gcloud app deploy app.yaml
-
-You can now access the application at `https://your-app-id.appspot.com`. You can use the form to submit messages, but it's non-deterministic which instance of your application will receive the notification. You can send multiple messages and refresh the page to see the received message.
diff --git a/appengine/flexible_python37_and_earlier/pubsub/app.yaml b/appengine/flexible_python37_and_earlier/pubsub/app.yaml
deleted file mode 100644
index 5804ac2b266..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/app.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-# [START gae_flex_pubsub_env]
-env_variables:
- PUBSUB_TOPIC: your-topic
- # This token is used to verify that requests originate from your
- # application. It can be any sufficiently random string.
- PUBSUB_VERIFICATION_TOKEN: 1234abc
-# [END gae_flex_pubsub_env]
diff --git a/appengine/flexible_python37_and_earlier/pubsub/main.py b/appengine/flexible_python37_and_earlier/pubsub/main.py
deleted file mode 100644
index 5ffc960841c..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/main.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import json
-import logging
-import os
-
-from flask import current_app, Flask, render_template, request
-from google.cloud import pubsub_v1
-
-
-app = Flask(__name__)
-
-# Configure the following environment variables via app.yaml
-# This is used in the push request handler to verify that the request came from
-# pubsub and originated from a trusted source.
-app.config["PUBSUB_VERIFICATION_TOKEN"] = os.environ["PUBSUB_VERIFICATION_TOKEN"]
-app.config["PUBSUB_TOPIC"] = os.environ["PUBSUB_TOPIC"]
-app.config["PROJECT"] = os.environ["GOOGLE_CLOUD_PROJECT"]
-
-
-# Global list to storage messages received by this instance.
-MESSAGES = []
-
-# Initialize the publisher client once to avoid memory leak
-# and reduce publish latency.
-publisher = pubsub_v1.PublisherClient()
-
-
-# [START gae_flex_pubsub_index]
-@app.route("/", methods=["GET", "POST"])
-def index():
- if request.method == "GET":
- return render_template("index.html", messages=MESSAGES)
-
- data = request.form.get("payload", "Example payload").encode("utf-8")
-
- # publisher = pubsub_v1.PublisherClient()
- topic_path = publisher.topic_path(
- current_app.config["PROJECT"], current_app.config["PUBSUB_TOPIC"]
- )
-
- publisher.publish(topic_path, data=data)
-
- return "OK", 200
-
-
-# [END gae_flex_pubsub_index]
-
-
-# [START gae_flex_pubsub_push]
-@app.route("/pubsub/push", methods=["POST"])
-def pubsub_push():
- if request.args.get("token", "") != current_app.config["PUBSUB_VERIFICATION_TOKEN"]:
- return "Invalid request", 400
-
- envelope = json.loads(request.data.decode("utf-8"))
- payload = base64.b64decode(envelope["message"]["data"])
-
- MESSAGES.append(payload)
-
- # Returning any 2xx status indicates successful receipt of the message.
- return "OK", 200
-
-
-# [END gae_flex_pubsub_push]
-
-
-@app.errorhandler(500)
-def server_error(e):
- logging.exception("An error occurred during a request.")
- return (
- """
- An internal error occurred:
{}
- See logs for full stacktrace.
- """.format(
- e
- ),
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/pubsub/main_test.py b/appengine/flexible_python37_and_earlier/pubsub/main_test.py
deleted file mode 100644
index 37abb0d6240..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/main_test.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import json
-import os
-
-import pytest
-
-import main
-
-
-@pytest.fixture
-def client():
- main.app.testing = True
- return main.app.test_client()
-
-
-def test_index(client):
- r = client.get("/")
- assert r.status_code == 200
-
-
-def test_post_index(client):
- r = client.post("/", data={"payload": "Test payload"})
- assert r.status_code == 200
-
-
-def test_push_endpoint(client):
- url = "/pubsub/push?token=" + os.environ["PUBSUB_VERIFICATION_TOKEN"]
-
- r = client.post(
- url,
- data=json.dumps(
- {"message": {"data": base64.b64encode(b"Test message").decode("utf-8")}}
- ),
- )
-
- assert r.status_code == 200
-
- # Make sure the message is visible on the home page.
- r = client.get("/")
- assert r.status_code == 200
- assert "Test message" in r.data.decode("utf-8")
-
-
-def test_push_endpoint_errors(client):
- # no token
- r = client.post("/pubsub/push")
- assert r.status_code == 400
-
- # invalid token
- r = client.post("/pubsub/push?token=bad")
- assert r.status_code == 400
diff --git a/appengine/flexible_python37_and_earlier/pubsub/noxfile_config.py b/appengine/flexible_python37_and_earlier/pubsub/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/pubsub/requirements-test.txt b/appengine/flexible_python37_and_earlier/pubsub/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/pubsub/requirements.txt b/appengine/flexible_python37_and_earlier/pubsub/requirements.txt
deleted file mode 100644
index d5b7ce68695..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.3.3; python_version < '3.7'
-google-cloud-pubsub==2.28.0
-gunicorn==23.0.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/pubsub/sample_message.json b/appengine/flexible_python37_and_earlier/pubsub/sample_message.json
deleted file mode 100644
index 8fe62d23fb9..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/sample_message.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "message": {
- "data": "SGVsbG8sIFdvcmxkIQ=="
- }
-}
diff --git a/appengine/flexible_python37_and_earlier/pubsub/templates/index.html b/appengine/flexible_python37_and_earlier/pubsub/templates/index.html
deleted file mode 100644
index 28449216c37..00000000000
--- a/appengine/flexible_python37_and_earlier/pubsub/templates/index.html
+++ /dev/null
@@ -1,36 +0,0 @@
-{#
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-
-
- Pub/Sub Python on Google App Engine Flexible Environment
-
-
-
-
Messages received by this instance:
-
- {% for message in messages: %}
-
{{message}}
- {% endfor %}
-
-
Note: because your application is likely running multiple instances, each instance will have a different list of messages.
-
-
-
-
diff --git a/appengine/flexible_python37_and_earlier/scipy/.gitignore b/appengine/flexible_python37_and_earlier/scipy/.gitignore
deleted file mode 100644
index de724cf6213..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-assets/resized_google_logo.jpg
diff --git a/appengine/flexible_python37_and_earlier/scipy/README.md b/appengine/flexible_python37_and_earlier/scipy/README.md
deleted file mode 100644
index f1fe346a338..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# SciPy on App Engine Flexible
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/scipy/README.md
-
-This sample demonstrates how to use SciPy to resize an image on App Engine Flexible.
-
diff --git a/appengine/flexible_python37_and_earlier/scipy/app.yaml b/appengine/flexible_python37_and_earlier/scipy/app.yaml
deleted file mode 100644
index ca76f83fc3b..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/app.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
diff --git a/appengine/flexible_python37_and_earlier/scipy/assets/google_logo.jpg b/appengine/flexible_python37_and_earlier/scipy/assets/google_logo.jpg
deleted file mode 100644
index 5538eaed2bd..00000000000
Binary files a/appengine/flexible_python37_and_earlier/scipy/assets/google_logo.jpg and /dev/null differ
diff --git a/appengine/flexible_python37_and_earlier/scipy/main.py b/appengine/flexible_python37_and_earlier/scipy/main.py
deleted file mode 100644
index 992aa59d32d..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/main.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-
-from flask import Flask
-from flask import request
-import imageio
-from PIL import Image
-
-app = Flask(__name__)
-
-
-@app.route("/")
-def resize():
- """Demonstrates using Pillow to resize an image.
-
- This takes a predefined image, resizes it to 300x300 pixesls, and writes it on disk.
-
- Returns:
- A message stating that the image has been resized.
- """
- app_path = os.path.dirname(os.path.realpath(__file__))
- image_path = os.path.join(app_path, "assets/google_logo.jpg")
- img = Image.fromarray(imageio.imread(image_path))
- img_tinted = img.resize((300, 300))
-
- output_image_path = request.args.get("output_image_path")
- # Write the tinted image back to disk
- imageio.imwrite(output_image_path, img_tinted)
- return "Image resized."
-
-
-@app.errorhandler(500)
-def server_error(e):
- """Serves a formatted message on-error.
-
- Returns:
- The error message and a code 500 status.
- """
- logging.exception("An error occurred during a request.")
- return (
- f"An internal error occurred:
{e}
See logs for full stacktrace.",
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/scipy/main_test.py b/appengine/flexible_python37_and_earlier/scipy/main_test.py
deleted file mode 100644
index d3124ffbb0a..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/main_test.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import tempfile
-
-import main
-
-
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
- with tempfile.TemporaryDirectory() as test_dir:
- output_image_path = os.path.join(test_dir, "resized_google_logo.jpg")
- r = client.get("/", query_string={"output_image_path": output_image_path})
-
- assert os.path.isfile(output_image_path)
- assert r.status_code == 200
diff --git a/appengine/flexible_python37_and_earlier/scipy/requirements-test.txt b/appengine/flexible_python37_and_earlier/scipy/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/scipy/requirements.txt b/appengine/flexible_python37_and_earlier/scipy/requirements.txt
deleted file mode 100644
index a67d9f49c61..00000000000
--- a/appengine/flexible_python37_and_earlier/scipy/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-gunicorn==23.0.0
-imageio==2.36.1
-numpy==2.2.4; python_version > '3.9'
-numpy==2.2.4; python_version == '3.9'
-numpy==2.2.4; python_version == '3.8'
-numpy==2.2.4; python_version == '3.7'
-pillow==10.4.0
-scipy==1.14.1
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/static_files/README.md b/appengine/flexible_python37_and_earlier/static_files/README.md
deleted file mode 100644
index 024a0abfbd9..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/README.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Python / Flask static files sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/static_files/README.md
-
-This demonstrates how to use [Flask](http://flask.pocoo.org/) to serve static files in your application.
-
-Flask automatically makes anything in the ``static`` directory available via the ``/static`` URL. If you plan on using a different framework, it may have different conventions for serving static files.
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
diff --git a/appengine/flexible_python37_and_earlier/static_files/app.yaml b/appengine/flexible_python37_and_earlier/static_files/app.yaml
deleted file mode 100644
index ca76f83fc3b..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/app.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
diff --git a/appengine/flexible_python37_and_earlier/static_files/main.py b/appengine/flexible_python37_and_earlier/static_files/main.py
deleted file mode 100644
index d77eca69f44..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/main.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [START gae_flex_python_static_files]
-import logging
-
-from flask import Flask, render_template
-
-
-app = Flask(__name__)
-
-
-@app.route("/")
-def hello():
- """Renders and serves a static HTML template page.
-
- Returns:
- A string containing the rendered HTML page.
- """
- return render_template("index.html")
-
-
-@app.errorhandler(500)
-def server_error(e):
- """Serves a formatted message on-error.
-
- Returns:
- The error message and a code 500 status.
- """
- logging.exception("An error occurred during a request.")
- return (
- f"An internal error occurred:
{e}
See logs for full stacktrace.",
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
-# [END gae_flex_python_static_files]
diff --git a/appengine/flexible_python37_and_earlier/static_files/main_test.py b/appengine/flexible_python37_and_earlier/static_files/main_test.py
deleted file mode 100644
index 2662db44201..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/main_test.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import main
-
-
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
-
- r = client.get("/")
- assert r.status_code == 200
-
- r = client.get("/static/main.css")
- assert r.status_code == 200
diff --git a/appengine/flexible_python37_and_earlier/static_files/noxfile_config.py b/appengine/flexible_python37_and_earlier/static_files/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/static_files/requirements-test.txt b/appengine/flexible_python37_and_earlier/static_files/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/static_files/requirements.txt b/appengine/flexible_python37_and_earlier/static_files/requirements.txt
deleted file mode 100644
index 70ecce34b5b..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-gunicorn==23.0.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/static_files/static/main.css b/appengine/flexible_python37_and_earlier/static_files/static/main.css
deleted file mode 100644
index f906044f4e4..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/static/main.css
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
-Copyright 2015 Google LLC.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-/* [START gae_flex_python_css] */
-body {
- font-family: Verdana, Helvetica, sans-serif;
- background-color: #CCCCFF;
-}
-/* [END gae_flex_python_css] */
diff --git a/appengine/flexible_python37_and_earlier/static_files/templates/index.html b/appengine/flexible_python37_and_earlier/static_files/templates/index.html
deleted file mode 100644
index 13b2ebe61af..00000000000
--- a/appengine/flexible_python37_and_earlier/static_files/templates/index.html
+++ /dev/null
@@ -1,29 +0,0 @@
-{#
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-
-
- Static Files
-
-
-
-
-
This is a static file serving example.
-
-
diff --git a/appengine/flexible_python37_and_earlier/storage/README.md b/appengine/flexible_python37_and_earlier/storage/README.md
deleted file mode 100644
index a2af4d60741..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# Python Google Cloud Storage sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/storage/README.md
-
-This sample demonstrates how to use [Google Cloud Storage](https://cloud.google.com/storage/) on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Setup
-
-Before you can run or deploy the sample, you will need to do the following:
-
-1. Enable the Cloud Storage API in the [Google Developers Console](https://console.developers.google.com/project/_/apiui/apiview/storage/overview).
-
-2. Create a Cloud Storage Bucket. You can do this with the [Google Cloud SDK](https://cloud.google.com/sdk) with the following command:
-
- $ gsutil mb gs://[your-bucket-name]
-
-3. Set the default ACL on your bucket to public read in order to serve files directly from Cloud Storage. You can do this with the [Google Cloud SDK](https://cloud.google.com/sdk) with the following command:
-
- $ gsutil defacl set public-read gs://[your-bucket-name]
-
-4. Update the environment variables in ``app.yaml``.
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-When running locally, you can use the [Google Cloud SDK](https://cloud.google.com/sdk) to provide authentication to use Google Cloud APIs:
-
- $ gcloud init
-
-Then set environment variables before starting your application:
-
- $ export CLOUD_STORAGE_BUCKET=[your-bucket-name]
- $ python main.py
diff --git a/appengine/flexible_python37_and_earlier/storage/app.yaml b/appengine/flexible_python37_and_earlier/storage/app.yaml
deleted file mode 100644
index e21a4c0ae91..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/app.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-#[START gae_flex_storage_yaml]
-env_variables:
- CLOUD_STORAGE_BUCKET: your-bucket-name
-#[END gae_flex_storage_yaml]
diff --git a/appengine/flexible_python37_and_earlier/storage/main.py b/appengine/flexible_python37_and_earlier/storage/main.py
deleted file mode 100644
index dc06fd2ae8e..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/main.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [START gae_flex_storage_app]
-from __future__ import annotations
-
-import logging
-import os
-
-from flask import Flask, request
-from google.cloud import storage
-
-app = Flask(__name__)
-
-# Configure this environment variable via app.yaml
-CLOUD_STORAGE_BUCKET = os.environ["CLOUD_STORAGE_BUCKET"]
-
-
-@app.route("/")
-def index() -> str:
- return """
-
-"""
-
-
-@app.route("/upload", methods=["POST"])
-def upload() -> str:
- """Process the uploaded file and upload it to Google Cloud Storage."""
- uploaded_file = request.files.get("file")
-
- if not uploaded_file:
- return "No file uploaded.", 400
-
- # Create a Cloud Storage client.
- gcs = storage.Client()
-
- # Get the bucket that the file will be uploaded to.
- bucket = gcs.get_bucket(CLOUD_STORAGE_BUCKET)
-
- # Create a new blob and upload the file's content.
- blob = bucket.blob(uploaded_file.filename)
-
- blob.upload_from_string(
- uploaded_file.read(), content_type=uploaded_file.content_type
- )
-
- # Make the blob public. This is not necessary if the
- # entire bucket is public.
- # See https://cloud.google.com/storage/docs/access-control/making-data-public.
- blob.make_public()
-
- # The public URL can be used to directly access the uploaded file via HTTP.
- return blob.public_url
-
-
-@app.errorhandler(500)
-def server_error(e: Exception | int) -> str:
- logging.exception("An error occurred during a request.")
- return (
- """
- An internal error occurred:
{}
- See logs for full stacktrace.
- """.format(
- e
- ),
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
-# [END gae_flex_storage_app]
diff --git a/appengine/flexible_python37_and_earlier/storage/main_test.py b/appengine/flexible_python37_and_earlier/storage/main_test.py
deleted file mode 100644
index ceb979d1ba6..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/main_test.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from io import BytesIO
-import os
-import uuid
-
-import flask
-import flask.testing
-from google.cloud import storage
-import pytest
-import requests
-
-import main
-
-
-@pytest.fixture
-def client() -> flask.testing.FlaskClient:
- main.app.testing = True
- return main.app.test_client()
-
-
-def test_index(client: flask.testing.FlaskClient) -> None:
- r = client.get("/")
- assert r.status_code == 200
-
-
-@pytest.fixture(scope="module")
-def blob_name() -> str:
- name = f"gae-flex-storage-{uuid.uuid4()}"
- yield name
-
- bucket = storage.Client().bucket(os.environ["CLOUD_STORAGE_BUCKET"])
- blob = bucket.blob(name)
- blob.delete()
-
-
-def test_upload(client: flask.testing.FlaskClient, blob_name: str) -> None:
- # Upload a simple file
- file_content = b"This is some test content."
-
- r = client.post("/upload", data={"file": (BytesIO(file_content), blob_name)})
-
- assert r.status_code == 200
-
- # The app should return the public cloud storage URL for the uploaded
- # file. Download and verify it.
- cloud_storage_url = r.data.decode("utf-8")
- r = requests.get(cloud_storage_url)
- assert r.text.encode("utf-8") == file_content
diff --git a/appengine/flexible_python37_and_earlier/storage/requirements-test.txt b/appengine/flexible_python37_and_earlier/storage/requirements-test.txt
deleted file mode 100644
index f27726d7455..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/requirements-test.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pytest==8.2.0
-google-cloud-storage==2.9.0
diff --git a/appengine/flexible_python37_and_earlier/storage/requirements.txt b/appengine/flexible_python37_and_earlier/storage/requirements.txt
deleted file mode 100644
index 994d3201309..00000000000
--- a/appengine/flexible_python37_and_earlier/storage/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-werkzeug==3.0.3; python_version > '3.7'
-werkzeug==2.3.8; python_version <= '3.7'
-google-cloud-storage==2.9.0
-gunicorn==23.0.0
diff --git a/appengine/flexible_python37_and_earlier/tasks/Dockerfile b/appengine/flexible_python37_and_earlier/tasks/Dockerfile
deleted file mode 100644
index 5aaeb51144d..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/Dockerfile
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Use the official Python image.
-# https://hub.docker.com/_/python
-FROM python:3.11
-
-# Copy local code to the container image.
-ENV APP_HOME /app
-WORKDIR $APP_HOME
-COPY . .
-
-# Install production dependencies.
-RUN pip install Flask gunicorn
-
-# Run the web service on container startup. Here we use the gunicorn
-# webserver, with one worker process and 8 threads.
-# For environments with multiple CPU cores, increase the number of workers
-# to be equal to the cores available.
-CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 main:app
diff --git a/appengine/flexible_python37_and_earlier/tasks/README.md b/appengine/flexible_python37_and_earlier/tasks/README.md
deleted file mode 100644
index 5eb60d5fa45..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/README.md
+++ /dev/null
@@ -1,122 +0,0 @@
-# Google Cloud Tasks Samples
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/tasks/README.md
-
-Sample command-line programs for interacting with the Cloud Tasks API
-.
-
-App Engine queues push tasks to an App Engine HTTP target. This directory
-contains both the App Engine app to deploy, as well as the snippets to run
-locally to push tasks to it, which could also be called on App Engine.
-
-`create_app_engine_queue_task.py` is a simple command-line program to create
-tasks to be pushed to the App Engine app.
-
-`main.py` is the main App Engine app. This app serves as an endpoint to receive
-App Engine task attempts.
-
-`app.yaml` configures the App Engine app.
-
-
-## Prerequisites to run locally:
-
-Please refer to [Setting Up a Python Development Environment](https://cloud.google.com/python/setup).
-
-### Authentication
-
-To set up authentication, please refer to our
-[authentication getting started guide](https://cloud.google.com/docs/authentication/getting-started).
-
-### Install Dependencies
-
-To install the dependencies for this sample, use the following command:
-
-```sh
-pip install -r requirements.txt
-```
-
-This sample uses the common protos in the [googleapis](https://github.com/googleapis/googleapis)
-repository. For more info, see
-[Protocol Buffer Basics](https://developers.google.com/protocol-buffers/docs/pythontutorial).
-
-## Deploying the App Engine App
-
-Deploy the App Engine app with gcloud:
-
-* To deploy to the Standard environment:
- ```sh
- gcloud app deploy app.yaml
- ```
-* To deploy to the Flexible environment:
- ```sh
- gcloud app deploy app.flexible.yaml
- ```
-
-Verify the index page is serving:
-
-```sh
-gcloud app browse
-```
-
-The App Engine app serves as a target for the push requests. It has an
-endpoint `/example_task_handler` that reads the payload (i.e., the request body)
-of the HTTP POST request and logs it. The log output can be viewed with:
-
-```sh
-gcloud app logs read
-```
-
-## Creating a queue
-
-To create a queue using the Cloud SDK, use the following gcloud command:
-
-```sh
-gcloud tasks queues create my-appengine-queue
-```
-
-Note: A newly created queue will route to the default App Engine service and
-version unless configured to do otherwise.
-
-## Run the Sample Using the Command Line
-
-Set environment variables:
-
-First, your project ID:
-
-```sh
-export PROJECT_ID=my-project-id
-```
-
-Then the queue ID, as specified at queue creation time. Queue IDs already
-created can be listed with `gcloud tasks queues list`.
-
-```sh
-export QUEUE_ID=my-appengine-queue
-```
-
-And finally the location ID, which can be discovered with
-`gcloud tasks queues describe $QUEUE_ID`, with the location embedded in
-the "name" value (for instance, if the name is
-"projects/my-project/locations/us-central1/queues/my-appengine-queue", then the
-location is "us-central1").
-
-```sh
-export LOCATION_ID=us-central1
-```
-
-### Using App Engine Queues
-
-Running the sample will create a task, targeted at the `/example_task_handler`
-endpoint, with a payload specified:
-
-> **Note**
-> Please update
-> [create_app_engine_queue_task.py](./create_app_engine_queue_task.py) before running the following
-> command.
-
-```sh
-python create_app_engine_queue_task.py --project=$PROJECT_ID --queue=$QUEUE_ID --location=$LOCATION_ID --payload=hello
-```
diff --git a/appengine/flexible_python37_and_earlier/tasks/app.flexible.yaml b/appengine/flexible_python37_and_earlier/tasks/app.flexible.yaml
deleted file mode 100644
index 5b3b333fda6..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/app.flexible.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT --threads=4 main:app
-
-runtime_config:
- python_version: 3
diff --git a/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task.py b/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task.py
deleted file mode 100644
index 7ddb6fb5a69..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2019 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import argparse
-
-
-def create_task(project, queue, location, payload=None, in_seconds=None):
- # [START cloud_tasks_appengine_create_task]
- """Create a task for a given queue with an arbitrary payload."""
-
- from google.cloud import tasks_v2
- from google.protobuf import timestamp_pb2
- import datetime
- import json
-
- # Create a client.
- client = tasks_v2.CloudTasksClient()
-
- # TODO(developer): Uncomment these lines and replace with your values.
- # project = 'my-project-id'
- # queue = 'my-appengine-queue'
- # location = 'us-central1'
- # payload = 'hello' or {'param': 'value'} for application/json
- # in_seconds = None
-
- # Construct the fully qualified queue name.
- parent = client.queue_path(project, location, queue)
-
- # Construct the request body.
- task = {
- "app_engine_http_request": { # Specify the type of request.
- "http_method": tasks_v2.HttpMethod.POST,
- "relative_uri": "/example_task_handler",
- }
- }
- if payload is not None:
- if isinstance(payload, dict):
- # Convert dict to JSON string
- payload = json.dumps(payload)
- # specify http content-type to application/json
- task["app_engine_http_request"]["headers"] = {
- "Content-type": "application/json"
- }
- # The API expects a payload of type bytes.
- converted_payload = payload.encode()
-
- # Add the payload to the request.
- task["app_engine_http_request"]["body"] = converted_payload
-
- if in_seconds is not None:
- # Convert "seconds from now" into an rfc3339 datetime string.
- d = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(
- seconds=in_seconds
- )
-
- # Create Timestamp protobuf.
- timestamp = timestamp_pb2.Timestamp()
- timestamp.FromDatetime(d)
-
- # Add the timestamp to the tasks.
- task["schedule_time"] = timestamp
-
- # Use the client to build and send the task.
- response = client.create_task(parent=parent, task=task)
-
- print(f"Created task {response.name}")
- return response
-
-
-# [END cloud_tasks_appengine_create_task]
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description=create_task.__doc__,
- formatter_class=argparse.RawDescriptionHelpFormatter,
- )
-
- parser.add_argument(
- "--project",
- help="Project of the queue to add the task to.",
- required=True,
- )
-
- parser.add_argument(
- "--queue",
- help="ID (short name) of the queue to add the task to.",
- required=True,
- )
-
- parser.add_argument(
- "--location",
- help="Location of the queue to add the task to.",
- required=True,
- )
-
- parser.add_argument(
- "--payload", help="Optional payload to attach to the push queue."
- )
-
- parser.add_argument(
- "--in_seconds",
- type=int,
- help="The number of seconds from now to schedule task attempt.",
- )
-
- args = parser.parse_args()
-
- create_task(args.project, args.queue, args.location, args.payload, args.in_seconds)
diff --git a/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task_test.py b/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task_test.py
deleted file mode 100644
index 3bacaed03ac..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/create_app_engine_queue_task_test.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-import create_app_engine_queue_task
-
-TEST_PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")
-TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1")
-TEST_QUEUE_NAME = os.getenv("TEST_QUEUE_NAME", "my-appengine-queue")
-
-
-def test_create_task():
- result = create_app_engine_queue_task.create_task(
- TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION
- )
- assert TEST_QUEUE_NAME in result.name
diff --git a/appengine/flexible_python37_and_earlier/tasks/main.py b/appengine/flexible_python37_and_earlier/tasks/main.py
deleted file mode 100644
index 4cb9b84a0b6..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/main.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""App Engine app to serve as an endpoint for App Engine queue samples."""
-
-# [START cloud_tasks_appengine_quickstart]
-from flask import Flask, render_template, request
-
-app = Flask(__name__)
-
-
-@app.route("/example_task_handler", methods=["POST"])
-def example_task_handler():
- """Log the request payload."""
- payload = request.get_data(as_text=True) or "(empty payload)"
- print(f"Received task with payload: {payload}")
- return render_template("index.html", payload=payload)
-
-
-# [END cloud_tasks_appengine_quickstart]
-
-
-@app.route("/")
-def hello():
- """Basic index to verify app is serving."""
- return "Hello World!"
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/tasks/main_test.py b/appengine/flexible_python37_and_earlier/tasks/main_test.py
deleted file mode 100644
index 42b96402dd0..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/main_test.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2019 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-
-@pytest.fixture
-def app():
- import main
-
- main.app.testing = True
- return main.app.test_client()
-
-
-def test_index(app):
- r = app.get("/")
- assert r.status_code == 200
-
-
-def test_log_payload(capsys, app):
- payload = "test_payload"
-
- r = app.post("/example_task_handler", data=payload)
- assert r.status_code == 200
-
- out, _ = capsys.readouterr()
- assert payload in out
-
-
-def test_empty_payload(capsys, app):
- r = app.post("/example_task_handler")
- assert r.status_code == 200
-
- out, _ = capsys.readouterr()
- assert "empty payload" in out
diff --git a/appengine/flexible_python37_and_earlier/tasks/noxfile_config.py b/appengine/flexible_python37_and_earlier/tasks/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/tasks/requirements-test.txt b/appengine/flexible_python37_and_earlier/tasks/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/appengine/flexible_python37_and_earlier/tasks/requirements.txt b/appengine/flexible_python37_and_earlier/tasks/requirements.txt
deleted file mode 100644
index 93643e9fb2a..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-gunicorn==23.0.0
-google-cloud-tasks==2.18.0
-Werkzeug==3.0.3
diff --git a/appengine/flexible_python37_and_earlier/tasks/snippets_test.py b/appengine/flexible_python37_and_earlier/tasks/snippets_test.py
deleted file mode 100644
index d0483389fc8..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/snippets_test.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2019 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import uuid
-
-import pytest
-
-import snippets
-
-TEST_PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")
-TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1")
-QUEUE_NAME_1 = f"queue-{uuid.uuid4()}"
-QUEUE_NAME_2 = f"queue-{uuid.uuid4()}"
-
-
-@pytest.mark.order1
-def test_create_queue():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_2
- )
- result = snippets.create_queue(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1, QUEUE_NAME_2
- )
- assert name in result.name
-
-
-@pytest.mark.order2
-def test_update_queue():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.update_queue(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert name in result.name
-
-
-@pytest.mark.order3
-def test_create_task():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.create_task(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert name in result.name
-
-
-@pytest.mark.order4
-def test_create_task_with_data():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.create_tasks_with_data(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- assert name in result.name
-
-
-@pytest.mark.order5
-def test_create_task_with_name():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.create_task_with_name(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1, "foo"
- )
- assert name in result.name
-
-
-@pytest.mark.order6
-def test_delete_task():
- result = snippets.delete_task(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert result is None
-
-
-@pytest.mark.order7
-def test_purge_queue():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.purge_queue(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert name in result.name
-
-
-@pytest.mark.order8
-def test_pause_queue():
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1
- )
- result = snippets.pause_queue(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert name in result.name
-
-
-@pytest.mark.order9
-def test_delete_queue():
- result = snippets.delete_queue(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_1)
- assert result is None
-
- result = snippets.delete_queue(TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME_2)
- assert result is None
-
-
-@pytest.mark.order10
-def test_retry_task():
- QUEUE_SIZE = 3
- QUEUE_NAME = []
- for i in range(QUEUE_SIZE):
- QUEUE_NAME.append(f"queue-{uuid.uuid4()}")
-
- name = "projects/{}/locations/{}/queues/{}".format(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME[2]
- )
- result = snippets.retry_task(
- TEST_PROJECT_ID, TEST_LOCATION, QUEUE_NAME[0], QUEUE_NAME[1], QUEUE_NAME[2]
- )
- assert name in result.name
-
- for i in range(QUEUE_SIZE):
- snippets.delete_queue(
- project=TEST_PROJECT_ID, location=TEST_LOCATION, queue=QUEUE_NAME[i]
- )
diff --git a/appengine/flexible_python37_and_earlier/tasks/templates/index.html b/appengine/flexible_python37_and_earlier/tasks/templates/index.html
deleted file mode 100644
index 7e4efc7b336..00000000000
--- a/appengine/flexible_python37_and_earlier/tasks/templates/index.html
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
- Tasks Sample
-
-
-
Printed task payload: {{ payload }}
-
-
\ No newline at end of file
diff --git a/appengine/flexible_python37_and_earlier/twilio/README.md b/appengine/flexible_python37_and_earlier/twilio/README.md
deleted file mode 100644
index 9a62b8400b5..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/README.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# Python Twilio voice and SMS sample for Google App Engine Flexible Environment
-
-[![Open in Cloud Shell][shell_img]][shell_link]
-
-[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png
-[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=appengine/flexible_python37_and_earlier/twilio/README.md
-
-This sample demonstrates how to use [Twilio](https://www.twilio.com) on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-For more information about Twilio, see their [Python quickstart tutorials](https://www.twilio.com/docs/quickstart/python).
-
-## Setup
-
-Before you can run or deploy the sample, you will need to do the following:
-
-1. [Create a Twilio Account](http://ahoy.twilio.com/googlecloudplatform). Google App Engine
-customers receive a complimentary credit for SMS messages and inbound messages.
-
-2. Create a number on twilio, and configure the voice request URL to be ``https://your-app-id.appspot.com/call/receive``
-and the SMS request URL to be ``https://your-app-id.appspot.com/sms/receive``.
-
-3. Configure your Twilio settings in the environment variables section in ``app.yaml``.
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-You can run the application locally to test the callbacks and SMS sending. You
-will need to set environment variables before starting your application:
-
- $ export TWILIO_ACCOUNT_SID=[your-twilio-account-sid]
- $ export TWILIO_AUTH_TOKEN=[your-twilio-auth-token]
- $ export TWILIO_NUMBER=[your-twilio-number]
- $ python main.py
diff --git a/appengine/flexible_python37_and_earlier/twilio/app.yaml b/appengine/flexible_python37_and_earlier/twilio/app.yaml
deleted file mode 100644
index 0e7de97eb19..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/app.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
-
-# [START gae_flex_twilio_env]
-env_variables:
- TWILIO_ACCOUNT_SID: your-account-sid
- TWILIO_AUTH_TOKEN: your-auth-token
- TWILIO_NUMBER: your-twilio-number
-# [END gae_flex_twilio_env]
diff --git a/appengine/flexible_python37_and_earlier/twilio/main.py b/appengine/flexible_python37_and_earlier/twilio/main.py
deleted file mode 100644
index 6f2a3a6830f..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/main.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright 2015 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-
-from flask import Flask, request
-from twilio import rest
-from twilio.twiml import messaging_response, voice_response
-
-
-TWILIO_ACCOUNT_SID = os.environ["TWILIO_ACCOUNT_SID"]
-TWILIO_AUTH_TOKEN = os.environ["TWILIO_AUTH_TOKEN"]
-TWILIO_NUMBER = os.environ["TWILIO_NUMBER"]
-
-
-app = Flask(__name__)
-
-
-# [START gae_flex_twilio_receive_call]
-@app.route("/call/receive", methods=["POST"])
-def receive_call():
- """Answers a call and replies with a simple greeting."""
- response = voice_response.VoiceResponse()
- response.say("Hello from Twilio!")
- return str(response), 200, {"Content-Type": "application/xml"}
-
-
-# [END gae_flex_twilio_receive_call]
-
-
-# [START gae_flex_twilio_send_sms]
-@app.route("/sms/send")
-def send_sms():
- """Sends a simple SMS message."""
- to = request.args.get("to")
- if not to:
- return (
- 'Please provide the number to message in the "to" query string'
- " parameter."
- ), 400
-
- client = rest.Client(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
- rv = client.messages.create(to=to, from_=TWILIO_NUMBER, body="Hello from Twilio!")
- return str(rv)
-
-
-# [END gae_flex_twilio_send_sms]
-
-
-# [START gae_flex_twilio_receive_sms]
-@app.route("/sms/receive", methods=["POST"])
-def receive_sms():
- """Receives an SMS message and replies with a simple greeting."""
- sender = request.values.get("From")
- body = request.values.get("Body")
-
- message = f"Hello, {sender}, you said: {body}"
-
- response = messaging_response.MessagingResponse()
- response.message(message)
- return str(response), 200, {"Content-Type": "application/xml"}
-
-
-# [END gae_flex_twilio_receive_sms]
-
-
-@app.errorhandler(500)
-def server_error(e):
- logging.exception("An error occurred during a request.")
- return (
- """
- An internal error occurred:
{}
- See logs for full stacktrace.
- """.format(
- e
- ),
- 500,
- )
-
-
-if __name__ == "__main__":
- # This is used when running locally. Gunicorn is used to run the
- # application on Google App Engine. See entrypoint in app.yaml.
- app.run(host="127.0.0.1", port=8080, debug=True)
diff --git a/appengine/flexible_python37_and_earlier/twilio/main_test.py b/appengine/flexible_python37_and_earlier/twilio/main_test.py
deleted file mode 100644
index 4878384f65a..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/main_test.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2016 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import re
-
-import pytest
-import responses
-
-
-@pytest.fixture
-def app(monkeypatch):
- monkeypatch.setenv("TWILIO_ACCOUNT_SID", "sid123")
- monkeypatch.setenv("TWILIO_AUTH_TOKEN", "auth123")
- monkeypatch.setenv("TWILIO_NUMBER", "0123456789")
-
- import main
-
- main.app.testing = True
- return main.app.test_client()
-
-
-def test_receive_call(app):
- r = app.post("/call/receive")
- assert "Hello from Twilio!" in r.data.decode("utf-8")
-
-
-@responses.activate
-def test_send_sms(app, monkeypatch):
- sample_response = {
- "sid": "sid",
- "date_created": "Wed, 20 Dec 2017 19:32:14 +0000",
- "date_updated": "Wed, 20 Dec 2017 19:32:14 +0000",
- "date_sent": None,
- "account_sid": "account_sid",
- "to": "+1234567890",
- "from": "+9876543210",
- "messaging_service_sid": None,
- "body": "Hello from Twilio!",
- "status": "queued",
- "num_segments": "1",
- "num_media": "0",
- "direction": "outbound-api",
- "api_version": "2010-04-01",
- "price": None,
- "price_unit": "USD",
- "error_code": None,
- "error_message": None,
- "uri": "/2010-04-01/Accounts/sample.json",
- "subresource_uris": {"media": "/2010-04-01/Accounts/sample/Media.json"},
- }
- responses.add(responses.POST, re.compile(".*"), json=sample_response, status=200)
-
- r = app.get("/sms/send")
- assert r.status_code == 400
-
- r = app.get("/sms/send?to=5558675309")
- assert r.status_code == 200
-
-
-def test_receive_sms(app):
- r = app.post(
- "/sms/receive", data={"From": "5558675309", "Body": "Jenny, I got your number."}
- )
- assert r.status_code == 200
- assert "Jenny, I got your number" in r.data.decode("utf-8")
diff --git a/appengine/flexible_python37_and_earlier/twilio/noxfile_config.py b/appengine/flexible_python37_and_earlier/twilio/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/twilio/requirements-test.txt b/appengine/flexible_python37_and_earlier/twilio/requirements-test.txt
deleted file mode 100644
index e89f6031ad7..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/requirements-test.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest==8.2.0
-responses==0.17.0; python_version < '3.7'
-responses==0.23.1; python_version > '3.6'
diff --git a/appengine/flexible_python37_and_earlier/twilio/requirements.txt b/appengine/flexible_python37_and_earlier/twilio/requirements.txt
deleted file mode 100644
index cfa80d12edf..00000000000
--- a/appengine/flexible_python37_and_earlier/twilio/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Flask==3.0.3; python_version > '3.6'
-Flask==2.0.3; python_version < '3.7'
-gunicorn==23.0.0
-twilio==9.0.3
-Werkzeug==3.0.3; python_version >= '3.7'
-Werkzeug==2.3.8; python_version < '3.7'
diff --git a/appengine/flexible_python37_and_earlier/websockets/README.md b/appengine/flexible_python37_and_earlier/websockets/README.md
deleted file mode 100644
index fabd0995a40..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Python websockets sample for Google App Engine Flexible Environment
-
-This sample demonstrates how to use websockets on [Google App Engine Flexible Environment](https://cloud.google.com/appengine).
-
-## Running locally
-
-Refer to the [top-level README](../README.md) for instructions on running and deploying.
-
-To run locally, you need to use gunicorn with the ``flask_socket`` worker:
-
- $ gunicorn -b 127.0.0.1:8080 -k flask_sockets.worker main:app
diff --git a/appengine/flexible_python37_and_earlier/websockets/app.yaml b/appengine/flexible_python37_and_earlier/websockets/app.yaml
deleted file mode 100644
index 8a323ffe30f..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/app.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-runtime: python
-env: flex
-
-# Use a special gunicorn worker class to support websockets.
-entrypoint: gunicorn -b :$PORT -k flask_sockets.worker main:app
-
-runtime_config:
- python_version: 3
-
-# Use only a single instance, so that this local-memory-only chat app will work
-# consistently with multiple users. To work across multiple instances, an
-# extra-instance messaging system or data store would be needed.
-manual_scaling:
- instances: 1
-
-
-# For applications which can take advantage of session affinity
-# (where the load balancer will attempt to route multiple connections from
-# the same user to the same App Engine instance), uncomment the folowing:
-
-# network:
-# session_affinity: true
diff --git a/appengine/flexible_python37_and_earlier/websockets/main.py b/appengine/flexible_python37_and_earlier/websockets/main.py
deleted file mode 100644
index 132160d9ab5..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/main.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# [START gae_flex_websockets_app]
-from flask import Flask, render_template
-from flask_sockets import Sockets
-
-
-app = Flask(__name__)
-sockets = Sockets(app)
-
-
-@sockets.route("/chat")
-def chat_socket(ws):
- while not ws.closed:
- message = ws.receive()
- if message is None: # message is "None" if the client has closed.
- continue
- # Send the message to all clients connected to this webserver
- # process. (To support multiple processes or instances, an
- # extra-instance storage or messaging system would be required.)
- clients = ws.handler.server.clients.values()
- for client in clients:
- client.ws.send(message)
-
-
-# [END gae_flex_websockets_app]
-
-
-@app.route("/")
-def index():
- return render_template("index.html")
-
-
-if __name__ == "__main__":
- print(
- """
-This can not be run directly because the Flask development server does not
-support web sockets. Instead, use gunicorn:
-
-gunicorn -b 127.0.0.1:8080 -k flask_sockets.worker main:app
-
-"""
- )
diff --git a/appengine/flexible_python37_and_earlier/websockets/main_test.py b/appengine/flexible_python37_and_earlier/websockets/main_test.py
deleted file mode 100644
index 597f2416d1c..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/main_test.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import socket
-import subprocess
-
-import pytest
-import requests
-from retrying import retry
-import websocket
-
-
-@pytest.fixture(scope="module")
-def server():
- """Provides the address of a test HTTP/websocket server.
- The test server is automatically created before
- a test and destroyed at the end.
- """
- # Ask the OS to allocate a port.
- sock = socket.socket()
- sock.bind(("127.0.0.1", 0))
- port = sock.getsockname()[1]
-
- # Free the port and pass it to a subprocess.
- sock.close()
-
- bind_to = f"127.0.0.1:{port}"
- server = subprocess.Popen(
- ["gunicorn", "-b", bind_to, "-k" "flask_sockets.worker", "main:app"],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
-
- # With btlr, there can be many processes are spawned and the
- # server might be in a tight memory situation, so let's wait for 2
- # mins.
- # Wait until the server responds before proceeding.
- @retry(wait_fixed=50, stop_max_delay=120000)
- def check_server(url):
- requests.get(url)
-
- check_server(f"http://{bind_to}/")
-
- yield bind_to
-
- server.kill()
-
- # Dump the logs for debugging
- out, err = server.communicate()
- print(f"gunicorn stdout: {out}")
- print(f"gunicorn stderr: {err}")
-
-
-def test_http(server):
- result = requests.get(f"http://{server}/")
- assert "Python Websockets Chat" in result.text
-
-
-def test_websocket(server):
- url = f"ws://{server}/chat"
- ws_one = websocket.WebSocket()
- ws_one.connect(url)
-
- ws_two = websocket.WebSocket()
- ws_two.connect(url)
-
- message = "Hello, World"
- ws_one.send(message)
-
- assert ws_one.recv() == message
- assert ws_two.recv() == message
diff --git a/appengine/flexible_python37_and_earlier/websockets/noxfile_config.py b/appengine/flexible_python37_and_earlier/websockets/noxfile_config.py
deleted file mode 100644
index 1665dd736f8..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/noxfile_config.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
- # You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
- # Old samples are opted out of enforcing Python type hints
- # All new samples should feature them
- "enforce_type_hints": False,
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
diff --git a/appengine/flexible_python37_and_earlier/websockets/requirements-test.txt b/appengine/flexible_python37_and_earlier/websockets/requirements-test.txt
deleted file mode 100644
index 92b9194cf63..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/requirements-test.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest==8.2.0
-retrying==1.3.4
-websocket-client==1.7.0
diff --git a/appengine/flexible_python37_and_earlier/websockets/requirements.txt b/appengine/flexible_python37_and_earlier/websockets/requirements.txt
deleted file mode 100644
index c1525d36077..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Flask==1.1.4 # it seems like Flask-sockets doesn't play well with 2.0+
-Flask-Sockets==0.2.1
-gunicorn==23.0.0
-requests==2.31.0
-markupsafe==2.0.1
-Werkzeug==1.0.1;
diff --git a/appengine/flexible_python37_and_earlier/websockets/templates/index.html b/appengine/flexible_python37_and_earlier/websockets/templates/index.html
deleted file mode 100644
index af6d791f148..00000000000
--- a/appengine/flexible_python37_and_earlier/websockets/templates/index.html
+++ /dev/null
@@ -1,96 +0,0 @@
-{#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-
-
- Google App Engine Flexible Environment - Python Websockets Chat
-
-
-
-
-
-
Chat demo
-
-
-
-
Messages:
-
-
-
-
-
Status:
-
-
-
-
-
-
-
-
diff --git a/appengine/standard/memcache/guestbook/main.py b/appengine/standard/memcache/guestbook/main.py
index 8c6352ce434..01e5ef60018 100644
--- a/appengine/standard/memcache/guestbook/main.py
+++ b/appengine/standard/memcache/guestbook/main.py
@@ -19,11 +19,12 @@
"""
# [START gae_memcache_guestbook_all]
-import cgi
-import cStringIO
import logging
import urllib
+import cgi
+import cStringIO
+
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import ndb
diff --git a/appengine/standard/ndb/overview/main.py b/appengine/standard/ndb/overview/main.py
index a502ab1c8fe..25e38e75500 100644
--- a/appengine/standard/ndb/overview/main.py
+++ b/appengine/standard/ndb/overview/main.py
@@ -21,10 +21,11 @@
"""
# [START gae_ndb_overview]
-import cgi
import textwrap
import urllib
+import cgi
+
from google.appengine.ext import ndb
import webapp2
diff --git a/appengine/standard/ndb/transactions/main.py b/appengine/standard/ndb/transactions/main.py
index bb7dc8b6a37..0a42de7feda 100644
--- a/appengine/standard/ndb/transactions/main.py
+++ b/appengine/standard/ndb/transactions/main.py
@@ -12,10 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import cgi
import random
import urllib
+import cgi
+
import flask
# [START gae_ndb_transactions_import]
diff --git a/appengine/standard/noxfile-template.py b/appengine/standard/noxfile-template.py
deleted file mode 100644
index f96f3288d70..00000000000
--- a/appengine/standard/noxfile-template.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import os
-from pathlib import Path
-import sys
-
-import nox
-import tempfile
-
-
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# DO NOT EDIT THIS FILE EVER!
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# WARNING - WARNING - WARNING - WARNING - WARNING
-
-# Copy `noxfile_config.py` to your directory and modify it instead.
-
-# `TEST_CONFIG` dict is a configuration hook that allows users to
-# modify the test configurations. The values here should be in sync
-# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
-# their directory and modify it.
-
-TEST_CONFIG = {
- # You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7", "3.7", "3.8", "3.10", "3.12", "3.13"],
- # An envvar key for determining the project id to use. Change it
- # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
- # build specific Cloud project. You can also use your own string
- # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
- # A dictionary you want to inject into your test. Don't put any
- # secrets here. These values will override predefined values.
- "envs": {},
-}
-
-
-try:
- # Ensure we can import noxfile_config in the project's directory.
- sys.path.append(".")
- from noxfile_config import TEST_CONFIG_OVERRIDE
-except ImportError as e:
- print("No user noxfile_config found: detail: {}".format(e))
- TEST_CONFIG_OVERRIDE = {}
-
-# Update the TEST_CONFIG with the user supplied values.
-TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-
-
-def get_pytest_env_vars():
- """Returns a dict for pytest invocation."""
- ret = {}
-
- # Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG["gcloud_project_env"]
- # This should error out if not set.
- ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
- ret["GCLOUD_PROJECT"] = os.environ[env_key] # deprecated
-
- # Apply user supplied envs.
- ret.update(TEST_CONFIG["envs"])
- return ret
-
-
-# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
-
-# Any default versions that should be ignored.
-IGNORED_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
-
-TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
-#
-# Style Checks
-#
-
-
-# Ignore I202 "Additional newline in a section of imports." to accommodate
-# region tags in import blocks. Since we specify an explicit ignore, we also
-# have to explicitly ignore the list of default ignores:
-# `E121,E123,E126,E226,E24,E704,W503,W504` as shown by `flake8 --help`.
-def _determine_local_import_names(start_dir):
- """Determines all import names that should be considered "local".
-
- This is used when running the linter to insure that import order is
- properly checked.
- """
- file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
- return [
- basename
- for basename, extension in file_ext_pairs
- if extension == ".py"
- or os.path.isdir(os.path.join(start_dir, basename))
- and basename not in ("__pycache__")
- ]
-
-
-FLAKE8_COMMON_ARGS = [
- "--show-source",
- "--builtin=gettext",
- "--max-complexity=20",
- "--import-order-style=google",
- "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
- "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I100,I201,I202",
- "--max-line-length=88",
-]
-
-
-@nox.session
-def lint(session):
- session.install("flake8", "flake8-import-order")
-
- local_names = _determine_local_import_names(".")
- args = FLAKE8_COMMON_ARGS + [
- "--application-import-names",
- ",".join(local_names),
- ".",
- ]
- session.run("flake8", *args)
-
-
-#
-# Sample Tests
-#
-
-
-PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-
-
-def _session_tests(session, post_install=None):
- """Runs py.test for a particular project."""
- if os.path.exists("requirements.txt"):
- session.install("-r", "requirements.txt")
-
- if os.path.exists("requirements-test.txt"):
- session.install("-r", "requirements-test.txt")
-
- if post_install:
- post_install(session)
-
- session.run(
- "pytest",
- *(PYTEST_COMMON_ARGS + session.posargs),
- # Pytest will return 5 when no tests are collected. This can happen
- # on travis where slow and flaky tests are excluded.
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
- success_codes=[0, 5],
- env=get_pytest_env_vars()
- )
-
-
-_GAE_ROOT = os.environ.get("GAE_ROOT")
-if _GAE_ROOT is None:
- _GAE_ROOT = tempfile.mkdtemp()
-
-
-def find_download_appengine_sdk_py(filename):
- """Find a file with the given name upwards."""
- d = os.getcwd()
- while d != "/":
- fullpath = os.path.join(d, filename)
- if os.path.isfile(fullpath):
- return fullpath
- d = os.path.abspath(d + "/../")
-
-
-def _setup_appengine_sdk(session):
- """Installs the App Engine SDK, if needed."""
- session.env["GAE_SDK_PATH"] = os.path.join(_GAE_ROOT, "google_appengine")
- download_appengine_sdk_py = find_download_appengine_sdk_py(
- "download-appengine-sdk.py"
- )
- session.install("requests")
- session.run("python", download_appengine_sdk_py, _GAE_ROOT)
-
-
-@nox.session(python=ALL_VERSIONS)
-def py(session):
- """Runs py.test for a sample using the specified version of Python."""
- if session.python in TESTED_VERSIONS:
- # Create a lib directory if needed,
- # otherwise the App Engine vendor library will complain.
- if not os.path.isdir("lib"):
- os.mkdir("lib")
-
- # mailjet_rest has an issue with requests being required pre install
- # https://github.com/mailjet/mailjet-apiv3-python/issues/38
- if "appengine/standard/mailjet" in os.getcwd():
- session.install("requests")
-
- _session_tests(session, post_install=_setup_appengine_sdk)
- else:
- print("SKIPPED: {} tests are disabled for this sample.".format(session.python))
-
-
-#
-# Readmegen
-#
-
-
-def _get_repo_root():
- """Returns the root folder of the project."""
- # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
- p = Path(os.getcwd())
- for i in range(10):
- if p is None:
- break
- if Path(p / ".git").exists():
- return str(p)
- p = p.parent
- raise Exception("Unable to detect repository root.")
-
-
-GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
-
-
-@nox.session
-@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session, path):
- """(Re-)generates the readme for a sample."""
- session.install("jinja2", "pyyaml")
-
- if os.path.exists(os.path.join(path, "requirements.txt")):
- session.install("-r", os.path.join(path, "requirements.txt"))
-
- in_file = os.path.join(path, "README.rst.in")
- session.run(
- "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
- )
diff --git a/appengine/standard/noxfile_config.py b/appengine/standard/noxfile_config.py
index 9d81eb86207..f39811085fa 100644
--- a/appengine/standard/noxfile_config.py
+++ b/appengine/standard/noxfile_config.py
@@ -24,7 +24,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"],
+ "ignored_versions": ["3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
"enforce_type_hints": False,
diff --git a/appengine/standard/urlfetch/snippets/main.py b/appengine/standard/urlfetch/snippets/main.py
index 7081510a465..95dca24aae9 100644
--- a/appengine/standard/urlfetch/snippets/main.py
+++ b/appengine/standard/urlfetch/snippets/main.py
@@ -19,14 +19,15 @@
import logging
import urllib
-# [START gae_urlfetch_snippets_imports_urllib2]
-import urllib2
-# [END gae_urlfetch_snippets_imports_urllib2]
# [START gae_urlfetch_snippets_imports_urlfetch]
from google.appengine.api import urlfetch
# [END gae_urlfetch_snippets_imports_urlfetch]
+# [START gae_urlfetch_snippets_imports_urllib2]
+import urllib2
+# [END gae_urlfetch_snippets_imports_urllib2]
+
import webapp2
diff --git a/appengine/standard_python3/bigquery/app.yaml b/appengine/standard_python3/bigquery/app.yaml
index 83c91f5b872..472f1f0c034 100644
--- a/appengine/standard_python3/bigquery/app.yaml
+++ b/appengine/standard_python3/bigquery/app.yaml
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
diff --git a/appengine/standard_python3/building-an-app/building-an-app-1/app.yaml b/appengine/standard_python3/building-an-app/building-an-app-1/app.yaml
index 100d540982b..2ecf42a0f4f 100644
--- a/appengine/standard_python3/building-an-app/building-an-app-1/app.yaml
+++ b/appengine/standard_python3/building-an-app/building-an-app-1/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python313
+runtime: python314
handlers:
# This configures Google App Engine to serve the files in the app's static
diff --git a/appengine/standard_python3/building-an-app/building-an-app-1/requirements-test.txt b/appengine/standard_python3/building-an-app/building-an-app-1/requirements-test.txt
index c2845bffbe8..c987bcfee7e 100644
--- a/appengine/standard_python3/building-an-app/building-an-app-1/requirements-test.txt
+++ b/appengine/standard_python3/building-an-app/building-an-app-1/requirements-test.txt
@@ -1 +1,2 @@
-pytest==7.0.1
+pytest==7.0.1; python_version == '3.9'
+pytest==9.0.2; python_version >= '3.10'
diff --git a/appengine/standard_python3/building-an-app/building-an-app-2/app.yaml b/appengine/standard_python3/building-an-app/building-an-app-2/app.yaml
index a0931a8a5d9..100d540982b 100644
--- a/appengine/standard_python3/building-an-app/building-an-app-2/app.yaml
+++ b/appengine/standard_python3/building-an-app/building-an-app-2/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
handlers:
# This configures Google App Engine to serve the files in the app's static
diff --git a/appengine/standard_python3/building-an-app/building-an-app-3/app.yaml b/appengine/standard_python3/building-an-app/building-an-app-3/app.yaml
index a0931a8a5d9..100d540982b 100644
--- a/appengine/standard_python3/building-an-app/building-an-app-3/app.yaml
+++ b/appengine/standard_python3/building-an-app/building-an-app-3/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
handlers:
# This configures Google App Engine to serve the files in the app's static
diff --git a/appengine/standard_python3/building-an-app/building-an-app-4/app.yaml b/appengine/standard_python3/building-an-app/building-an-app-4/app.yaml
index a0931a8a5d9..100d540982b 100644
--- a/appengine/standard_python3/building-an-app/building-an-app-4/app.yaml
+++ b/appengine/standard_python3/building-an-app/building-an-app-4/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
handlers:
# This configures Google App Engine to serve the files in the app's static
diff --git a/appengine/standard_python3/bundled-services/blobstore/django/app.yaml b/appengine/standard_python3/bundled-services/blobstore/django/app.yaml
index 96e1c924ee3..6994339e157 100644
--- a/appengine/standard_python3/bundled-services/blobstore/django/app.yaml
+++ b/appengine/standard_python3/bundled-services/blobstore/django/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
handlers:
diff --git a/appengine/standard_python3/bundled-services/blobstore/flask/app.yaml b/appengine/standard_python3/bundled-services/blobstore/flask/app.yaml
index 96e1c924ee3..6994339e157 100644
--- a/appengine/standard_python3/bundled-services/blobstore/flask/app.yaml
+++ b/appengine/standard_python3/bundled-services/blobstore/flask/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
handlers:
diff --git a/appengine/standard_python3/bundled-services/blobstore/wsgi/app.yaml b/appengine/standard_python3/bundled-services/blobstore/wsgi/app.yaml
index 96e1c924ee3..6994339e157 100644
--- a/appengine/standard_python3/bundled-services/blobstore/wsgi/app.yaml
+++ b/appengine/standard_python3/bundled-services/blobstore/wsgi/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
handlers:
diff --git a/appengine/standard_python3/bundled-services/deferred/django/app.yaml b/appengine/standard_python3/bundled-services/deferred/django/app.yaml
index 84314e1d25b..c2226a56b67 100644
--- a/appengine/standard_python3/bundled-services/deferred/django/app.yaml
+++ b/appengine/standard_python3/bundled-services/deferred/django/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
env_variables:
NDB_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL: "True"
diff --git a/appengine/standard_python3/bundled-services/deferred/flask/app.yaml b/appengine/standard_python3/bundled-services/deferred/flask/app.yaml
index 84314e1d25b..c2226a56b67 100644
--- a/appengine/standard_python3/bundled-services/deferred/flask/app.yaml
+++ b/appengine/standard_python3/bundled-services/deferred/flask/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
env_variables:
NDB_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL: "True"
diff --git a/appengine/standard_python3/bundled-services/deferred/wsgi/app.yaml b/appengine/standard_python3/bundled-services/deferred/wsgi/app.yaml
index 84314e1d25b..c2226a56b67 100644
--- a/appengine/standard_python3/bundled-services/deferred/wsgi/app.yaml
+++ b/appengine/standard_python3/bundled-services/deferred/wsgi/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
env_variables:
NDB_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL: "True"
diff --git a/appengine/standard_python3/bundled-services/mail/django/app.yaml b/appengine/standard_python3/bundled-services/mail/django/app.yaml
index ff79a69182c..902fe897910 100644
--- a/appengine/standard_python3/bundled-services/mail/django/app.yaml
+++ b/appengine/standard_python3/bundled-services/mail/django/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
app_engine_apis: true
inbound_services:
diff --git a/appengine/standard_python3/bundled-services/mail/django/requirements.txt b/appengine/standard_python3/bundled-services/mail/django/requirements.txt
index 4922ec66011..bdd07a4620e 100644
--- a/appengine/standard_python3/bundled-services/mail/django/requirements.txt
+++ b/appengine/standard_python3/bundled-services/mail/django/requirements.txt
@@ -1,4 +1,4 @@
-Django==5.1.10; python_version >= "3.10"
+Django==5.1.13; python_version >= "3.10"
Django==4.2.16; python_version >= "3.8" and python_version < "3.10"
Django==3.2.25; python_version < "3.8"
django-environ==0.10.0
diff --git a/appengine/standard_python3/bundled-services/mail/flask/app.yaml b/appengine/standard_python3/bundled-services/mail/flask/app.yaml
index ff79a69182c..79f6d993358 100644
--- a/appengine/standard_python3/bundled-services/mail/flask/app.yaml
+++ b/appengine/standard_python3/bundled-services/mail/flask/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python312
app_engine_apis: true
inbound_services:
diff --git a/appengine/standard_python3/bundled-services/mail/wsgi/app.yaml b/appengine/standard_python3/bundled-services/mail/wsgi/app.yaml
index ff79a69182c..79f6d993358 100644
--- a/appengine/standard_python3/bundled-services/mail/wsgi/app.yaml
+++ b/appengine/standard_python3/bundled-services/mail/wsgi/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python312
app_engine_apis: true
inbound_services:
diff --git a/appengine/standard_python3/cloudsql/app.yaml b/appengine/standard_python3/cloudsql/app.yaml
index 496b60f231b..dfb14663846 100644
--- a/appengine/standard_python3/cloudsql/app.yaml
+++ b/appengine/standard_python3/cloudsql/app.yaml
@@ -14,7 +14,7 @@
# [START gae_python38_cloudsql_config]
# [START gae_python3_cloudsql_config]
-runtime: python39
+runtime: python313
env_variables:
CLOUD_SQL_USERNAME: YOUR-USERNAME
diff --git a/appengine/standard_python3/cloudsql/requirements.txt b/appengine/standard_python3/cloudsql/requirements.txt
index 7ca534fe2e0..7fe39c1a1b2 100644
--- a/appengine/standard_python3/cloudsql/requirements.txt
+++ b/appengine/standard_python3/cloudsql/requirements.txt
@@ -1,6 +1,6 @@
flask==3.0.0
# psycopg2==2.8.4
# you will need either the binary or the regular - for more info see http://initd.org/psycopg/docs/install.html
-psycopg2-binary==2.9.9
+psycopg2-binary==2.9.11
PyMySQL==1.1.1
-SQLAlchemy==2.0.10
\ No newline at end of file
+SQLAlchemy==2.0.44
diff --git a/appengine/standard_python3/custom-server/app.yaml b/appengine/standard_python3/custom-server/app.yaml
index ff2f64b2b26..b67aef4f96e 100644
--- a/appengine/standard_python3/custom-server/app.yaml
+++ b/appengine/standard_python3/custom-server/app.yaml
@@ -14,7 +14,7 @@
# [START gae_python38_custom_runtime]
# [START gae_python3_custom_runtime]
-runtime: python39
+runtime: python313
entrypoint: uwsgi --http-socket :$PORT --wsgi-file main.py --callable app --master --processes 1 --threads 2
# [END gae_python3_custom_runtime]
# [END gae_python38_custom_runtime]
diff --git a/appengine/standard_python3/django/app.yaml b/appengine/standard_python3/django/app.yaml
index 5a7255118c8..ddf86e23823 100644
--- a/appengine/standard_python3/django/app.yaml
+++ b/appengine/standard_python3/django/app.yaml
@@ -15,7 +15,7 @@
#
# [START gaestd_py_django_app_yaml]
-runtime: python39
+runtime: python313
env_variables:
# This setting is used in settings.py to configure your ALLOWED_HOSTS
diff --git a/appengine/standard_python3/django/requirements.txt b/appengine/standard_python3/django/requirements.txt
index cdd4b54cf3e..60b4408e6b4 100644
--- a/appengine/standard_python3/django/requirements.txt
+++ b/appengine/standard_python3/django/requirements.txt
@@ -1,4 +1,4 @@
-Django==5.1.8; python_version >= "3.10"
+Django==5.1.15; python_version >= "3.10"
Django==4.2.17; python_version >= "3.8" and python_version < "3.10"
Django==3.2.25; python_version < "3.8"
django-environ==0.10.0
diff --git a/appengine/standard_python3/migration/urlfetch/app.yaml b/appengine/standard_python3/migration/urlfetch/app.yaml
index dd75aa47c69..3aa9d9d2207 100644
--- a/appengine/standard_python3/migration/urlfetch/app.yaml
+++ b/appengine/standard_python3/migration/urlfetch/app.yaml
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
diff --git a/appengine/standard_python3/pubsub/app.yaml b/appengine/standard_python3/pubsub/app.yaml
index 9e3e948e4db..3c36b4bfb3c 100644
--- a/appengine/standard_python3/pubsub/app.yaml
+++ b/appengine/standard_python3/pubsub/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
# [START gae_standard_pubsub_env]
env_variables:
diff --git a/appengine/standard_python3/redis/app.yaml b/appengine/standard_python3/redis/app.yaml
index 2797ed154f7..138895c3737 100644
--- a/appengine/standard_python3/redis/app.yaml
+++ b/appengine/standard_python3/redis/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
env_variables:
REDIS_HOST: your-redis-host
diff --git a/appengine/standard_python3/spanner/app.yaml b/appengine/standard_python3/spanner/app.yaml
index a4e3167ec08..59a31baca33 100644
--- a/appengine/standard_python3/spanner/app.yaml
+++ b/appengine/standard_python3/spanner/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
env_variables:
SPANNER_INSTANCE: "YOUR-SPANNER-INSTANCE-ID"
diff --git a/appengine/standard_python3/warmup/app.yaml b/appengine/standard_python3/warmup/app.yaml
index fdda19a79b1..3cc59533b01 100644
--- a/appengine/standard_python3/warmup/app.yaml
+++ b/appengine/standard_python3/warmup/app.yaml
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python39
+runtime: python313
inbound_services:
- warmup
diff --git a/auth/cloud-client-temp/authenticate_explicit_with_adc.py b/auth/cloud-client-temp/authenticate_explicit_with_adc.py
new file mode 100644
index 00000000000..c9ce2f02af3
--- /dev/null
+++ b/auth/cloud-client-temp/authenticate_explicit_with_adc.py
@@ -0,0 +1,55 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_explicit_adc]
+
+
+import google.auth
+from google.cloud import storage
+import google.oauth2.credentials
+
+
+def authenticate_explicit_with_adc() -> None:
+ """
+ List storage buckets by authenticating with ADC.
+
+ // TODO(Developer):
+ // 1. Before running this sample,
+ // set up ADC as described in https://cloud.google.com/docs/authentication/external/set-up-adc
+ // 2. Replace the project variable.
+ // 3. Make sure you have the necessary permission to list storage buckets: "storage.buckets.list"
+ """
+
+ # Construct the Google credentials object which obtains the default configuration from your
+ # working environment.
+ # google.auth.default() will give you ComputeEngineCredentials
+ # if you are on a GCE (or other metadata server supported environments).
+ credentials, project_id = google.auth.default()
+ # If you are authenticating to a Cloud API, you can let the library include the default scope,
+ # https://www.googleapis.com/auth/cloud-platform, because IAM is used to provide fine-grained
+ # permissions for Cloud.
+ # If you need to provide a scope, specify it as follows:
+ # credentials = google.auth.default(scopes=scope)
+ # For more information on scopes to use,
+ # see: https://developers.google.com/identity/protocols/oauth2/scopes
+
+ # Construct the Storage client.
+ storage_client = storage.Client(credentials=credentials, project=project_id)
+ buckets = storage_client.list_buckets()
+ print("Buckets:")
+ for bucket in buckets:
+ print(bucket.name)
+ print("Listed all storage buckets.")
+
+# [END auth_cloud_explicit_adc]
diff --git a/auth/cloud-client-temp/authenticate_implicit_with_adc.py b/auth/cloud-client-temp/authenticate_implicit_with_adc.py
new file mode 100644
index 00000000000..ed967ab880a
--- /dev/null
+++ b/auth/cloud-client-temp/authenticate_implicit_with_adc.py
@@ -0,0 +1,46 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_implicit_adc]
+
+from google.cloud import storage
+
+
+def authenticate_implicit_with_adc(project_id: str = "your-google-cloud-project-id") -> None:
+ """
+ When interacting with Google Cloud Client libraries, the library can auto-detect the
+ credentials to use.
+
+ // TODO(Developer):
+ // 1. Before running this sample,
+ // set up ADC as described in https://cloud.google.com/docs/authentication/external/set-up-adc
+ // 2. Replace the project variable.
+ // 3. Make sure that the user account or service account that you are using
+ // has the required permissions. For this sample, you must have "storage.buckets.list".
+ Args:
+ project_id: The project id of your Google Cloud project.
+ """
+
+ # This snippet demonstrates how to list buckets.
+ # *NOTE*: Replace the client created below with the client required for your application.
+ # Note that the credentials are not specified when constructing the client.
+ # Hence, the client library will look for credentials using ADC.
+ storage_client = storage.Client(project=project_id)
+ buckets = storage_client.list_buckets()
+ print("Buckets:")
+ for bucket in buckets:
+ print(bucket.name)
+ print("Listed all storage buckets.")
+
+# [END auth_cloud_implicit_adc]
diff --git a/auth/cloud-client-temp/custom_aws_supplier.py b/auth/cloud-client-temp/custom_aws_supplier.py
new file mode 100644
index 00000000000..abe858eb5b5
--- /dev/null
+++ b/auth/cloud-client-temp/custom_aws_supplier.py
@@ -0,0 +1,119 @@
+# Copyright 2025 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import sys
+
+import boto3
+from dotenv import load_dotenv
+from google.auth.aws import AwsSecurityCredentials, AwsSecurityCredentialsSupplier
+from google.auth.aws import Credentials as AwsCredentials
+from google.auth.exceptions import GoogleAuthError
+from google.auth.transport.requests import AuthorizedSession
+
+load_dotenv()
+
+
+class CustomAwsSupplier(AwsSecurityCredentialsSupplier):
+ """Custom AWS Security Credentials Supplier."""
+
+ def __init__(self) -> None:
+ """Initializes the Boto3 session, prioritizing environment variables for region."""
+ # Explicitly read the region from the environment first. This ensures that
+ # a value from a .env file is picked up reliably for local testing.
+ region = os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION")
+
+ # If region is None, Boto3's discovery chain will be used when needed.
+ self.session = boto3.Session(region_name=region)
+ self._cached_region = None
+ print(f"[INFO] CustomAwsSupplier initialized. Region from env: {region}")
+
+ def get_aws_region(self, context: object, request: object) -> str:
+ """Returns the AWS region using Boto3's default provider chain."""
+ if self._cached_region:
+ return self._cached_region
+
+ # Accessing region_name will use the value from the constructor if provided,
+ # otherwise it triggers Boto3's lazy-loading discovery (e.g., metadata service).
+ self._cached_region = self.session.region_name
+
+ if not self._cached_region:
+ print("[ERROR] Boto3 was unable to resolve an AWS region.", file=sys.stderr)
+ raise GoogleAuthError("Boto3 was unable to resolve an AWS region.")
+
+ print(f"[INFO] Boto3 resolved AWS Region: {self._cached_region}")
+ return self._cached_region
+
+ def get_aws_security_credentials(self, context: object, request: object = None) -> AwsSecurityCredentials:
+ """Retrieves AWS security credentials using Boto3's default provider chain."""
+ aws_credentials = self.session.get_credentials()
+ if not aws_credentials:
+ print("[ERROR] Unable to resolve AWS credentials.", file=sys.stderr)
+ raise GoogleAuthError("Unable to resolve AWS credentials from the provider chain.")
+
+ # Instead of printing the whole key, mask everything but the last 4 characters
+ masked_access_key = f"{'*' * 16}{aws_credentials.access_key[-4:]}"
+ print(f"[INFO] Resolved AWS Access Key ID: {masked_access_key}")
+
+ return AwsSecurityCredentials(
+ access_key_id=aws_credentials.access_key,
+ secret_access_key=aws_credentials.secret_key,
+ session_token=aws_credentials.token,
+ )
+
+
+def main() -> None:
+ """Main function to demonstrate the custom AWS supplier."""
+ print("--- Starting Script ---")
+
+ gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE")
+ sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL")
+ gcs_bucket_name = os.getenv("GCS_BUCKET_NAME")
+
+ print(f"GCP_WORKLOAD_AUDIENCE: {gcp_audience}")
+ print(f"GCS_BUCKET_NAME: {gcs_bucket_name}")
+
+ if not all([gcp_audience, sa_impersonation_url, gcs_bucket_name]):
+ print("[ERROR] Missing required environment variables.", file=sys.stderr)
+ raise GoogleAuthError("Missing required environment variables.")
+
+ custom_supplier = CustomAwsSupplier()
+
+ credentials = AwsCredentials(
+ audience=gcp_audience,
+ subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
+ service_account_impersonation_url=sa_impersonation_url,
+ aws_security_credentials_supplier=custom_supplier,
+ scopes=['https://www.googleapis.com/auth/devstorage.read_write'],
+ )
+
+ bucket_url = f"https://storage.googleapis.com/storage/v1/b/{gcs_bucket_name}"
+ print(f"Request URL: {bucket_url}")
+
+ authed_session = AuthorizedSession(credentials)
+ try:
+ print("Attempting to make authenticated request to Google Cloud Storage...")
+ res = authed_session.get(bucket_url)
+ res.raise_for_status()
+ print("\n--- SUCCESS! ---")
+ print("Successfully authenticated and retrieved bucket data:")
+ print(json.dumps(res.json(), indent=2))
+ except Exception as e:
+ print("--- FAILED --- ", file=sys.stderr)
+ print(e, file=sys.stderr)
+ exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/auth/cloud-client-temp/custom_okta_supplier.py b/auth/cloud-client-temp/custom_okta_supplier.py
new file mode 100644
index 00000000000..c2b35fd406f
--- /dev/null
+++ b/auth/cloud-client-temp/custom_okta_supplier.py
@@ -0,0 +1,190 @@
+# Copyright 2025 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import time
+import urllib.parse
+
+from dotenv import load_dotenv
+from google.auth.exceptions import GoogleAuthError
+from google.auth.identity_pool import Credentials as IdentityPoolClient
+from google.auth.transport.requests import AuthorizedSession
+import requests
+
+load_dotenv()
+
+# Workload Identity Pool Configuration
+GCP_WORKLOAD_AUDIENCE = os.getenv("GCP_WORKLOAD_AUDIENCE")
+SERVICE_ACCOUNT_IMPERSONATION_URL = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL")
+GCS_BUCKET_NAME = os.getenv("GCS_BUCKET_NAME")
+
+# Okta Configuration
+OKTA_DOMAIN = os.getenv("OKTA_DOMAIN")
+OKTA_CLIENT_ID = os.getenv("OKTA_CLIENT_ID")
+OKTA_CLIENT_SECRET = os.getenv("OKTA_CLIENT_SECRET")
+
+# Constants
+TOKEN_URL = "https://sts.googleapis.com/v1/token"
+SUBJECT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:jwt"
+
+
+class OktaClientCredentialsSupplier:
+ """A custom SubjectTokenSupplier that authenticates with Okta.
+
+ This supplier uses the Client Credentials grant flow for machine-to-machine
+ (M2M) authentication with Okta.
+ """
+
+ def __init__(self, domain: str, client_id: str, client_secret: str) -> None:
+ self.okta_token_url = f"{domain}/oauth2/default/v1/token"
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.access_token = None
+ self.expiry_time = 0
+ print("OktaClientCredentialsSupplier initialized.")
+
+ def get_subject_token(self, context: object, request: object = None) -> str:
+ """Fetches a new token if the current one is expired or missing.
+
+ Args:
+ context: The context object, not used in this implementation.
+
+ Returns:
+ The Okta Access token.
+ """
+ # Check if the current token is still valid (with a 60-second buffer).
+ is_token_valid = self.access_token and time.time() < self.expiry_time - 60
+
+ if is_token_valid:
+ print("[Supplier] Returning cached Okta Access token.")
+ return self.access_token
+
+ print(
+ "[Supplier] Token is missing or expired. Fetching new Okta Access token..."
+ )
+ self._fetch_okta_access_token()
+ return self.access_token
+
+ def _fetch_okta_access_token(self) -> None:
+ """Performs the Client Credentials grant flow with Okta."""
+ headers = {
+ "Content-Type": "application/x-www-form-urlencoded",
+ "Accept": "application/json",
+ }
+ data = {
+ "grant_type": "client_credentials",
+ "scope": "gcp.test.read",
+ }
+ encoded_data = urllib.parse.urlencode(data)
+
+ try:
+ response = requests.post(
+ self.okta_token_url,
+ headers=headers,
+ data=encoded_data,
+ auth=(self.client_id, self.client_secret),
+ )
+ response.raise_for_status()
+ token_data = response.json()
+
+ if "access_token" in token_data and "expires_in" in token_data:
+ self.access_token = token_data["access_token"]
+ self.expiry_time = time.time() + token_data["expires_in"]
+ print(
+ f"[Supplier] Successfully received Access Token from Okta. "
+ f"Expires in {token_data['expires_in']} seconds."
+ )
+ else:
+ raise GoogleAuthError(
+ "Access token or expires_in not found in Okta response."
+ )
+ except requests.exceptions.RequestException as e:
+ print(f"[Supplier] Error fetching token from Okta: {e}")
+ if e.response:
+ print(f"[Supplier] Okta response: {e.response.text}")
+ raise GoogleAuthError(
+ "Failed to authenticate with Okta using Client Credentials grant."
+ ) from e
+
+
+def main() -> None:
+ """Main function to demonstrate the custom Okta supplier.
+
+ TODO(Developer):
+ 1. Before running this sample, set up your environment variables. You can do
+ this by creating a .env file in the same directory as this script and
+ populating it with the following variables:
+ - GCP_WORKLOAD_AUDIENCE: The audience for the GCP workload identity pool.
+ - GCP_SERVICE_ACCOUNT_IMPERSONATION_URL: The URL for service account impersonation (optional).
+ - GCS_BUCKET_NAME: The name of the GCS bucket to access.
+ - OKTA_DOMAIN: Your Okta domain (e.g., https://dev-12345.okta.com).
+ - OKTA_CLIENT_ID: The Client ID of your Okta M2M application.
+ - OKTA_CLIENT_SECRET: The Client Secret of your Okta M2M application.
+ """
+ if not all(
+ [
+ GCP_WORKLOAD_AUDIENCE,
+ GCS_BUCKET_NAME,
+ OKTA_DOMAIN,
+ OKTA_CLIENT_ID,
+ OKTA_CLIENT_SECRET,
+ ]
+ ):
+ raise GoogleAuthError(
+ "Missing required environment variables. Please check your .env file."
+ )
+
+ # 1. Instantiate the custom supplier with Okta credentials.
+ okta_supplier = OktaClientCredentialsSupplier(
+ OKTA_DOMAIN, OKTA_CLIENT_ID, OKTA_CLIENT_SECRET
+ )
+
+ # 2. Instantiate an IdentityPoolClient.
+ client = IdentityPoolClient(
+ audience=GCP_WORKLOAD_AUDIENCE,
+ subject_token_type=SUBJECT_TOKEN_TYPE,
+ token_url=TOKEN_URL,
+ subject_token_supplier=okta_supplier,
+ # If you choose to provide explicit scopes: use the `scopes` parameter.
+ default_scopes=['https://www.googleapis.com/auth/cloud-platform'],
+ service_account_impersonation_url=SERVICE_ACCOUNT_IMPERSONATION_URL,
+ )
+
+ # 3. Construct the URL for the Cloud Storage JSON API.
+ bucket_url = f"https://storage.googleapis.com/storage/v1/b/{GCS_BUCKET_NAME}"
+ print(f"[Test] Getting metadata for bucket: {GCS_BUCKET_NAME}...")
+ print(f"[Test] Request URL: {bucket_url}")
+
+ # 4. Use the client to make an authenticated request.
+ authed_session = AuthorizedSession(client)
+ try:
+ res = authed_session.get(bucket_url)
+ res.raise_for_status()
+ print("\n--- SUCCESS! ---")
+ print("Successfully authenticated and retrieved bucket data:")
+ print(json.dumps(res.json(), indent=2))
+ except requests.exceptions.RequestException as e:
+ print("\n--- FAILED ---")
+ print(f"Request failed: {e}")
+ if e.response:
+ print(f"Response: {e.response.text}")
+ exit(1)
+ except GoogleAuthError as e:
+ print("\n--- FAILED ---")
+ print(f"Authentication or request failed: {e}")
+ exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/auth/cloud-client-temp/idtoken_from_impersonated_credentials.py b/auth/cloud-client-temp/idtoken_from_impersonated_credentials.py
new file mode 100644
index 00000000000..7819072d927
--- /dev/null
+++ b/auth/cloud-client-temp/idtoken_from_impersonated_credentials.py
@@ -0,0 +1,75 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_idtoken_impersonated_credentials]
+
+import google
+from google.auth import impersonated_credentials
+import google.auth.transport.requests
+
+
+def idtoken_from_impersonated_credentials(
+ impersonated_service_account: str, scope: str, target_audience: str) -> None:
+ """
+ Use a service account (SA1) to impersonate as another service account (SA2) and obtain id token
+ for the impersonated account.
+ To obtain token for SA2, SA1 should have the "roles/iam.serviceAccountTokenCreator" permission
+ on SA2.
+
+ Args:
+ impersonated_service_account: The name of the privilege-bearing service account for whom the credential is created.
+ Examples: name@project-id.iam.gserviceaccount.com
+
+ scope: Provide the scopes that you might need to request to access Google APIs,
+ depending on the level of access you need.
+ For this example, we use the cloud-wide scope and use IAM to narrow the permissions.
+ https://cloud.google.com/docs/authentication#authorization_for_services
+ For more information, see: https://developers.google.com/identity/protocols/oauth2/scopes
+
+ target_audience: The service name for which the id token is requested. Service name refers to the
+ logical identifier of an API service, such as "iap.googleapis.com".
+ Examples: iap.googleapis.com
+ """
+
+ # Construct the GoogleCredentials object which obtains the default configuration from your
+ # working environment.
+ credentials, project_id = google.auth.default()
+
+ # Create the impersonated credential.
+ target_credentials = impersonated_credentials.Credentials(
+ source_credentials=credentials,
+ target_principal=impersonated_service_account,
+ # delegates: The chained list of delegates required to grant the final accessToken.
+ # For more information, see:
+ # https://cloud.google.com/iam/docs/create-short-lived-credentials-direct#sa-credentials-permissions
+ # Delegate is NOT USED here.
+ delegates=[],
+ target_scopes=[scope],
+ lifetime=300)
+
+ # Set the impersonated credential, target audience and token options.
+ id_creds = impersonated_credentials.IDTokenCredentials(
+ target_credentials,
+ target_audience=target_audience,
+ include_email=True)
+
+ # Get the ID token.
+ # Once you've obtained the ID token, use it to make an authenticated call
+ # to the target audience.
+ request = google.auth.transport.requests.Request()
+ id_creds.refresh(request)
+ # token = id_creds.token
+ print("Generated ID token.")
+
+# [END auth_cloud_idtoken_impersonated_credentials]
diff --git a/auth/cloud-client-temp/idtoken_from_metadata_server.py b/auth/cloud-client-temp/idtoken_from_metadata_server.py
new file mode 100644
index 00000000000..7c9277f349e
--- /dev/null
+++ b/auth/cloud-client-temp/idtoken_from_metadata_server.py
@@ -0,0 +1,50 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_idtoken_metadata_server]
+
+import google
+from google.auth import compute_engine
+import google.auth.transport.requests
+import google.oauth2.credentials
+
+
+def idtoken_from_metadata_server(url: str) -> None:
+ """
+ Use the Google Cloud metadata server in the Cloud Run (or AppEngine or Kubernetes etc.,)
+ environment to create an identity token and add it to the HTTP request as part of an
+ Authorization header.
+
+ Args:
+ url: The url or target audience to obtain the ID token for.
+ Examples: http://www.example.com
+ """
+
+ request = google.auth.transport.requests.Request()
+ # Set the target audience.
+ # Setting "use_metadata_identity_endpoint" to "True" will make the request use the default application
+ # credentials. Optionally, you can also specify a specific service account to use by mentioning
+ # the service_account_email.
+ credentials = compute_engine.IDTokenCredentials(
+ request=request, target_audience=url, use_metadata_identity_endpoint=True
+ )
+
+ # Get the ID token.
+ # Once you've obtained the ID token, use it to make an authenticated call
+ # to the target audience.
+ credentials.refresh(request)
+ # print(credentials.token)
+ print("Generated ID token.")
+
+# [END auth_cloud_idtoken_metadata_server]
diff --git a/auth/cloud-client-temp/idtoken_from_service_account.py b/auth/cloud-client-temp/idtoken_from_service_account.py
new file mode 100644
index 00000000000..d96a4862a8b
--- /dev/null
+++ b/auth/cloud-client-temp/idtoken_from_service_account.py
@@ -0,0 +1,50 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_idtoken_service_account]
+
+import google.auth
+import google.auth.transport.requests
+
+from google.oauth2 import service_account
+
+
+def get_idToken_from_serviceaccount(json_credential_path: str, target_audience: str) -> None:
+ """
+ TODO(Developer): Replace the below variables before running the code.
+
+ *NOTE*:
+ Using service account keys introduces risk; they are long-lived, and can be used by anyone
+ that obtains the key. Proper rotation and storage reduce this risk but do not eliminate it.
+ For these reasons, you should consider an alternative approach that
+ does not use a service account key. Several alternatives to service account keys
+ are described here:
+ https://cloud.google.com/docs/authentication/external/set-up-adc
+
+ Args:
+ json_credential_path: Path to the service account json credential file.
+ target_audience: The url or target audience to obtain the ID token for.
+ Examples: http://www.example.com
+ """
+
+ # Obtain the id token by providing the json file path and target audience.
+ credentials = service_account.IDTokenCredentials.from_service_account_file(
+ filename=json_credential_path,
+ target_audience=target_audience)
+
+ request = google.auth.transport.requests.Request()
+ credentials.refresh(request)
+ print("Generated ID token.")
+
+# [END auth_cloud_idtoken_service_account]
diff --git a/auth/cloud-client-temp/noxfile.py b/auth/cloud-client-temp/noxfile.py
new file mode 100644
index 00000000000..3cdf3cf3bdb
--- /dev/null
+++ b/auth/cloud-client-temp/noxfile.py
@@ -0,0 +1,85 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+
+import nox
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# https://github.com/psf/black/issues/2964, pin click version to 8.0.4 to
+# avoid incompatibility with black.
+CLICK_VERSION = "click==8.0.4"
+BLACK_VERSION = "black==19.3b0"
+BLACK_PATHS = [
+ "google",
+ "tests",
+ "tests_async",
+ "noxfile.py",
+ "setup.py",
+ "docs/conf.py",
+]
+
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+#
+# Style Checks
+#
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"])
+def unit(session):
+ # constraints_path = str(
+ # CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ # )
+ session.install("-r", "requirements.txt")
+ # session.install("-e", ".")
+ session.run(
+ "pytest",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "snippets_test.py",
+ # "tests_async",
+ )
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+ session.install("flake8")
+
+ args = FLAKE8_COMMON_ARGS + [
+ ".",
+ ]
+ session.run("flake8", *args)
diff --git a/appengine/flexible_python37_and_earlier/scipy/noxfile_config.py b/auth/cloud-client-temp/noxfile_config.py
similarity index 80%
rename from appengine/flexible_python37_and_earlier/scipy/noxfile_config.py
rename to auth/cloud-client-temp/noxfile_config.py
index 887244766fd..e892b338fce 100644
--- a/appengine/flexible_python37_and_earlier/scipy/noxfile_config.py
+++ b/auth/cloud-client-temp/noxfile_config.py
@@ -14,25 +14,24 @@
# Default TEST_CONFIG_OVERRIDE for python repos.
-# You can copy this file into your directory, then it will be imported from
+# You can copy this file into your directory, then it will be imported from
# the noxfile.py.
# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- "enforce_type_hints": False,
+ "enforce_type_hints": True,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
+ # "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
- # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {},
diff --git a/auth/cloud-client-temp/requirements.txt b/auth/cloud-client-temp/requirements.txt
new file mode 100644
index 00000000000..8dafe853ea0
--- /dev/null
+++ b/auth/cloud-client-temp/requirements.txt
@@ -0,0 +1,8 @@
+google-cloud-compute==1.42.0
+google-cloud-storage==3.8.0
+google-auth==2.47.0
+pytest==8.4.2; python_version == '3.9'
+pytest==9.0.2; python_version > '3.9'
+boto3>=1.26.0
+requests==2.32.5
+python-dotenv==1.2.1
diff --git a/auth/cloud-client-temp/snippets_test.py b/auth/cloud-client-temp/snippets_test.py
new file mode 100644
index 00000000000..940f27e553c
--- /dev/null
+++ b/auth/cloud-client-temp/snippets_test.py
@@ -0,0 +1,76 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import re
+
+from _pytest.capture import CaptureFixture
+import google
+import google.auth.transport.requests
+from google.oauth2 import service_account
+
+import authenticate_explicit_with_adc
+import authenticate_implicit_with_adc
+import idtoken_from_metadata_server
+import idtoken_from_service_account
+# from system_tests.noxfile import SERVICE_ACCOUNT_FILE
+import verify_google_idtoken
+
+CREDENTIALS, PROJECT = google.auth.default()
+SERVICE_ACCOUNT_FILE = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
+
+
+def test_authenticate_explicit_with_adc(capsys: CaptureFixture) -> None:
+ authenticate_explicit_with_adc.authenticate_explicit_with_adc()
+ out, err = capsys.readouterr()
+ assert re.search("Listed all storage buckets.", out)
+
+
+def test_authenticate_implicit_with_adc(capsys: CaptureFixture) -> None:
+ authenticate_implicit_with_adc.authenticate_implicit_with_adc(PROJECT)
+ out, err = capsys.readouterr()
+ assert re.search("Listed all storage buckets.", out)
+
+
+def test_idtoken_from_metadata_server(capsys: CaptureFixture) -> None:
+ idtoken_from_metadata_server.idtoken_from_metadata_server("https://www.google.com")
+ out, err = capsys.readouterr()
+ assert re.search("Generated ID token.", out)
+
+
+def test_idtoken_from_service_account(capsys: CaptureFixture) -> None:
+ idtoken_from_service_account.get_idToken_from_serviceaccount(
+ SERVICE_ACCOUNT_FILE,
+ "iap.googleapis.com")
+ out, err = capsys.readouterr()
+ assert re.search("Generated ID token.", out)
+
+
+def test_verify_google_idtoken() -> None:
+ idtoken = get_idtoken_from_service_account(SERVICE_ACCOUNT_FILE, "iap.googleapis.com")
+
+ verify_google_idtoken.verify_google_idtoken(
+ idtoken,
+ "iap.googleapis.com",
+ "https://www.googleapis.com/oauth2/v3/certs"
+ )
+
+
+def get_idtoken_from_service_account(json_credential_path: str, target_audience: str) -> str:
+ credentials = service_account.IDTokenCredentials.from_service_account_file(
+ filename=json_credential_path,
+ target_audience=target_audience)
+
+ request = google.auth.transport.requests.Request()
+ credentials.refresh(request)
+ return credentials.token
diff --git a/auth/cloud-client-temp/verify_google_idtoken.py b/auth/cloud-client-temp/verify_google_idtoken.py
new file mode 100644
index 00000000000..8bb4c075fd7
--- /dev/null
+++ b/auth/cloud-client-temp/verify_google_idtoken.py
@@ -0,0 +1,62 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_cloud_verify_google_idtoken]
+
+import google
+import google.auth.transport.requests
+from google.oauth2 import id_token
+
+
+def verify_google_idtoken(idtoken: str, audience: str = "iap.googleapis.com",
+ jwk_url: str = "https://www.googleapis.com/oauth2/v3/certs") -> None:
+ """
+ Verifies the obtained Google id token. This is done at the receiving end of the OIDC endpoint.
+ The most common use case for verifying the ID token is when you are protecting
+ your own APIs with IAP. Google services already verify credentials as a platform,
+ so verifying ID tokens before making Google API calls is usually unnecessary.
+
+ Args:
+ idtoken: The Google ID token to verify.
+
+ audience: The service name for which the id token is requested. Service name refers to the
+ logical identifier of an API service, such as "iap.googleapis.com".
+
+ jwk_url: To verify id tokens, get the Json Web Key endpoint (jwk).
+ OpenID Connect allows the use of a "Discovery document," a JSON document found at a
+ well-known location containing key-value pairs which provide details about the
+ OpenID Connect provider's configuration.
+ For more information on validating the jwt, see:
+ https://developers.google.com/identity/protocols/oauth2/openid-connect#validatinganidtoken
+
+ Here, we validate Google's token using Google's OpenID Connect service (jwkUrl).
+ For more information on jwk, see:
+ https://auth0.com/docs/secure/tokens/json-web-tokens/json-web-key-sets
+ """
+
+ request = google.auth.transport.requests.Request()
+ # Set the parameters and verify the token.
+ # Setting "certs_url" is optional. When verifying a Google ID token, this is set by default.
+ result = id_token.verify_token(idtoken, request, audience, clock_skew_in_seconds=10)
+
+ # Verify that the token contains subject and email claims.
+ # Get the User id.
+ if not result["sub"] is None:
+ print(f"User id: {result['sub']}")
+ # Optionally, if "INCLUDE_EMAIL" was set in the token options, check if the
+ # email was verified.
+ if result.get('email_verified'):
+ print(f"Email verified {result['email']}")
+
+# [END auth_cloud_verify_google_idtoken]
diff --git a/auth/custom-credentials/aws/Dockerfile b/auth/custom-credentials/aws/Dockerfile
new file mode 100644
index 00000000000..d90d88aa0a8
--- /dev/null
+++ b/auth/custom-credentials/aws/Dockerfile
@@ -0,0 +1,15 @@
+FROM python:3.13-slim
+
+RUN useradd -m appuser
+
+WORKDIR /app
+
+COPY --chown=appuser:appuser requirements.txt .
+
+USER appuser
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY --chown=appuser:appuser snippets.py .
+
+
+CMD ["python3", "snippets.py"]
diff --git a/auth/custom-credentials/aws/README.md b/auth/custom-credentials/aws/README.md
new file mode 100644
index 00000000000..551c95ef691
--- /dev/null
+++ b/auth/custom-credentials/aws/README.md
@@ -0,0 +1,127 @@
+# Running the Custom AWS Credential Supplier Sample
+
+This sample demonstrates how to use a custom AWS security credential supplier to authenticate with Google Cloud using AWS as an external identity provider. It uses Boto3 (the AWS SDK for Python) to fetch credentials from sources like Amazon Elastic Kubernetes Service (EKS) with IAM Roles for Service Accounts (IRSA), Elastic Container Service (ECS), or Fargate.
+
+## Prerequisites
+
+* An AWS account.
+* A Google Cloud project with the IAM API enabled.
+* A GCS bucket.
+* Python 3.10 or later installed.
+
+If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the Google Cloud Auth library.
+
+
+## Running Locally
+
+For local development, you can provide credentials and configuration in a JSON file.
+
+### Install Dependencies
+
+Ensure you have Python installed, then install the required libraries:
+
+```bash
+pip install -r requirements.txt
+```
+
+### Configure Credentials for Local Development
+
+1. Copy the example secrets file to a new file named `custom-credentials-aws-secrets.json`:
+ ```bash
+ cp custom-credentials-aws-secrets.json.example custom-credentials-aws-secrets.json
+ ```
+2. Open `custom-credentials-aws-secrets.json` and fill in the required values for your AWS and Google Cloud configuration. Do not check your `custom-credentials-aws-secrets.json` file into version control.
+
+**Note:** This file is only used for local development and is not needed when running in a containerized environment like EKS with IRSA.
+
+
+### Run the Script
+
+```bash
+python3 snippets.py
+```
+
+When run locally, the script will detect the `custom-credentials-aws-secrets.json` file and use it to configure the necessary environment variables for the Boto3 client.
+
+## Running in a Containerized Environment (EKS)
+
+This section provides a brief overview of how to run the sample in an Amazon EKS cluster.
+
+### EKS Cluster Setup
+
+First, you need an EKS cluster. You can create one using `eksctl` or the AWS Management Console. For detailed instructions, refer to the [Amazon EKS documentation](https://docs.aws.amazon.com/eks/latest/userguide/create-cluster.html).
+
+### Configure IAM Roles for Service Accounts (IRSA)
+
+IRSA enables you to associate an IAM role with a Kubernetes service account. This provides a secure way for your pods to access AWS services without hardcoding long-lived credentials.
+
+Run the following command to create the IAM role and bind it to a Kubernetes Service Account:
+
+```bash
+eksctl create iamserviceaccount \
+ --name your-k8s-service-account \
+ --namespace default \
+ --cluster your-cluster-name \
+ --region your-aws-region \
+ --role-name your-role-name \
+ --attach-policy-arn arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess \
+ --approve
+```
+
+> **Note**: The `--attach-policy-arn` flag is used here to demonstrate attaching permissions. Update this with the specific AWS policy ARN your application requires.
+
+For a deep dive into how this works without using `eksctl`, refer to the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation.
+
+### Configure Google Cloud to Trust the AWS Role
+
+To allow your AWS role to authenticate as a Google Cloud service account, you need to configure Workload Identity Federation. This process involves these key steps:
+
+1. **Create a Workload Identity Pool and an AWS Provider:** The pool holds the configuration, and the provider is set up to trust your AWS account.
+
+2. **Create or select a Google Cloud Service Account:** This service account will be impersonated by your AWS role.
+
+3. **Bind the AWS Role to the Google Cloud Service Account:** Create an IAM policy binding that gives your AWS role the `Workload Identity User` (`roles/iam.workloadIdentityUser`) role on the Google Cloud service account.
+
+For more detailed information, see the documentation on [Configuring Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds).
+
+**Alternative: Direct Access**
+
+> For supported resources, you can grant roles directly to the AWS identity, bypassing service account impersonation. To do this, grant a role (like `roles/storage.objectViewer`) to the workload identity principal (`principalSet://...`) directly on the resource's IAM policy.
+
+For more information on granting roles directly to the workload identity principal, see [Configuring Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds).
+
+### Containerize and Package the Application
+
+Create a `Dockerfile` for the Python application and push the image to a container registry (for example Amazon ECR) that your EKS cluster can access.
+
+**Note:** The provided [`Dockerfile`](Dockerfile) is an example and may need to be modified for your specific needs.
+
+Build and push the image:
+```bash
+docker build -t your-container-image:latest .
+docker push your-container-image:latest
+```
+
+### Deploy to EKS
+
+Create a Kubernetes deployment manifest to deploy your application to the EKS cluster. See the [`pod.yaml`](pod.yaml) file for an example.
+
+**Note:** The provided [`pod.yaml`](pod.yaml) is an example and may need to be modified for your specific needs.
+
+Deploy the pod:
+
+```bash
+kubectl apply -f pod.yaml
+```
+
+### Clean Up
+
+To clean up the resources, delete the EKS cluster and any other AWS and Google Cloud resources you created.
+
+```bash
+eksctl delete cluster --name your-cluster-name
+```
+
+## Testing
+
+This sample is not continuously tested. It is provided for instructional purposes and may require modifications to work in your environment.
diff --git a/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example b/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example
new file mode 100644
index 00000000000..300dc70c138
--- /dev/null
+++ b/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example
@@ -0,0 +1,8 @@
+{
+ "aws_access_key_id": "YOUR_AWS_ACCESS_KEY_ID",
+ "aws_secret_access_key": "YOUR_AWS_SECRET_ACCESS_KEY",
+ "aws_region": "YOUR_AWS_REGION",
+ "gcp_workload_audience": "YOUR_GCP_WORKLOAD_AUDIENCE",
+ "gcs_bucket_name": "YOUR_GCS_BUCKET_NAME",
+ "gcp_service_account_impersonation_url": "YOUR_GCP_SERVICE_ACCOUNT_IMPERSONATION_URL"
+}
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/app.yaml b/auth/custom-credentials/aws/noxfile_config.py
similarity index 79%
rename from appengine/flexible_python37_and_earlier/hello_world_django/app.yaml
rename to auth/custom-credentials/aws/noxfile_config.py
index 62b74a9c27e..0ed973689f7 100644
--- a/appengine/flexible_python37_and_earlier/hello_world_django/app.yaml
+++ b/auth/custom-credentials/aws/noxfile_config.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,9 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT project_name.wsgi
-
-runtime_config:
- python_version: 3
+TEST_CONFIG_OVERRIDE = {
+ "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"],
+}
diff --git a/auth/custom-credentials/aws/pod.yaml b/auth/custom-credentials/aws/pod.yaml
new file mode 100644
index 00000000000..70b94bf25e2
--- /dev/null
+++ b/auth/custom-credentials/aws/pod.yaml
@@ -0,0 +1,40 @@
+# Copyright 2025 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: v1
+kind: Pod
+metadata:
+ name: custom-credential-pod
+spec:
+ # The Kubernetes Service Account that is annotated with the corresponding
+ # AWS IAM role ARN. See the README for instructions on setting up IAM
+ # Roles for Service Accounts (IRSA).
+ serviceAccountName: your-k8s-service-account
+ containers:
+ - name: gcp-auth-sample
+ # The container image pushed to the container registry
+ # For example, Amazon Elastic Container Registry
+ image: your-container-image:latest
+ env:
+ # REQUIRED: The AWS region. Boto3 requires this to be set explicitly
+ # in containers.
+ - name: AWS_REGION
+ value: "your-aws-region"
+ # REQUIRED: The full identifier of the Workload Identity Pool provider
+ - name: GCP_WORKLOAD_AUDIENCE
+ value: "your-gcp-workload-audience"
+ # OPTIONAL: Enable Google Cloud service account impersonation
+ # - name: GCP_SERVICE_ACCOUNT_IMPERSONATION_URL
+ # value: "your-gcp-service-account-impersonation-url"
+ - name: GCS_BUCKET_NAME
+ value: "your-gcs-bucket-name"
diff --git a/auth/custom-credentials/aws/requirements-test.txt b/auth/custom-credentials/aws/requirements-test.txt
new file mode 100644
index 00000000000..43b24059d3e
--- /dev/null
+++ b/auth/custom-credentials/aws/requirements-test.txt
@@ -0,0 +1,2 @@
+-r requirements.txt
+pytest==8.2.0
diff --git a/auth/custom-credentials/aws/requirements.txt b/auth/custom-credentials/aws/requirements.txt
new file mode 100644
index 00000000000..2c302888ed7
--- /dev/null
+++ b/auth/custom-credentials/aws/requirements.txt
@@ -0,0 +1,5 @@
+boto3==1.40.53
+google-auth==2.43.0
+google-cloud-storage==2.19.0
+python-dotenv==1.1.1
+requests==2.32.3
diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py
new file mode 100644
index 00000000000..2d77a123015
--- /dev/null
+++ b/auth/custom-credentials/aws/snippets.py
@@ -0,0 +1,153 @@
+# Copyright 2025 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_custom_credential_supplier_aws]
+import json
+import os
+import sys
+
+import boto3
+from google.auth import aws
+from google.auth import exceptions
+from google.cloud import storage
+
+
+class CustomAwsSupplier(aws.AwsSecurityCredentialsSupplier):
+ """Custom AWS Security Credentials Supplier using Boto3."""
+
+ def __init__(self):
+ """Initializes the Boto3 session, prioritizing environment variables for region."""
+ # Explicitly read the region from the environment first.
+ region = os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION")
+
+ # If region is None, Boto3's discovery chain will be used when needed.
+ self.session = boto3.Session(region_name=region)
+ self._cached_region = None
+
+ def get_aws_region(self, context, request) -> str:
+ """Returns the AWS region using Boto3's default provider chain."""
+ if self._cached_region:
+ return self._cached_region
+
+ self._cached_region = self.session.region_name
+
+ if not self._cached_region:
+ raise exceptions.GoogleAuthError(
+ "Boto3 was unable to resolve an AWS region."
+ )
+
+ return self._cached_region
+
+ def get_aws_security_credentials(
+ self, context, request=None
+ ) -> aws.AwsSecurityCredentials:
+ """Retrieves AWS security credentials using Boto3's default provider chain."""
+ creds = self.session.get_credentials()
+ if not creds:
+ raise exceptions.GoogleAuthError(
+ "Unable to resolve AWS credentials from Boto3."
+ )
+
+ return aws.AwsSecurityCredentials(
+ access_key_id=creds.access_key,
+ secret_access_key=creds.secret_key,
+ session_token=creds.token,
+ )
+
+
+def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=None):
+ """Authenticates using the custom AWS supplier and gets bucket metadata.
+
+ Returns:
+ dict: The bucket metadata response from the Google Cloud Storage API.
+ """
+
+ custom_supplier = CustomAwsSupplier()
+
+ credentials = aws.Credentials(
+ audience=audience,
+ subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
+ service_account_impersonation_url=impersonation_url,
+ aws_security_credentials_supplier=custom_supplier,
+ scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
+ )
+
+ storage_client = storage.Client(credentials=credentials)
+
+ bucket = storage_client.get_bucket(bucket_name)
+
+ return bucket._properties
+
+
+# [END auth_custom_credential_supplier_aws]
+
+
+def _load_config_from_file():
+ """
+ If a local secrets file is present, load it into the environment.
+ This is a "just-in-time" configuration for local development. These
+ variables are only set for the current process and are not exposed to the
+ shell.
+ """
+ secrets_file = "custom-credentials-aws-secrets.json"
+ if os.path.exists(secrets_file):
+ with open(secrets_file, "r") as f:
+ try:
+ secrets = json.load(f)
+ except json.JSONDecodeError:
+ print(f"Error: '{secrets_file}' is not valid JSON.", file=sys.stderr)
+ return
+
+ os.environ["AWS_ACCESS_KEY_ID"] = secrets.get("aws_access_key_id", "")
+ os.environ["AWS_SECRET_ACCESS_KEY"] = secrets.get("aws_secret_access_key", "")
+ os.environ["AWS_REGION"] = secrets.get("aws_region", "")
+ os.environ["GCP_WORKLOAD_AUDIENCE"] = secrets.get("gcp_workload_audience", "")
+ os.environ["GCS_BUCKET_NAME"] = secrets.get("gcs_bucket_name", "")
+ os.environ["GCP_SERVICE_ACCOUNT_IMPERSONATION_URL"] = secrets.get(
+ "gcp_service_account_impersonation_url", ""
+ )
+
+
+def main():
+
+ # Reads the custom-credentials-aws-secrets.json if running locally.
+ _load_config_from_file()
+
+ # Now, read the configuration from the environment. In a local run, these
+ # will be the values we just set. In a containerized run, they will be
+ # the values provided by the environment.
+ gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE")
+ sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL")
+ gcs_bucket_name = os.getenv("GCS_BUCKET_NAME")
+
+ if not all([gcp_audience, gcs_bucket_name]):
+ print(
+ "Required configuration missing. Please provide it in a "
+ "custom-credentials-aws-secrets.json file or as environment variables: "
+ "GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME"
+ )
+ return
+
+ try:
+ print(f"Retrieving metadata for bucket: {gcs_bucket_name}...")
+ metadata = authenticate_with_aws_credentials(
+ gcs_bucket_name, gcp_audience, sa_impersonation_url
+ )
+ print("--- SUCCESS! ---")
+ print(json.dumps(metadata, indent=2))
+ except Exception as e:
+ print(f"Authentication or Request failed: {e}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py
new file mode 100644
index 00000000000..e0382cfc6f5
--- /dev/null
+++ b/auth/custom-credentials/aws/snippets_test.py
@@ -0,0 +1,130 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+from unittest import mock
+
+import pytest
+
+import snippets
+
+# --- Unit Tests ---
+
+
+@mock.patch.dict(os.environ, {"AWS_REGION": "us-west-2"})
+@mock.patch("boto3.Session")
+def test_init_priority_env_var(mock_boto_session):
+ """Test that AWS_REGION env var takes priority during init."""
+ snippets.CustomAwsSupplier()
+ mock_boto_session.assert_called_with(region_name="us-west-2")
+
+
+@mock.patch.dict(os.environ, {}, clear=True)
+@mock.patch("boto3.Session")
+def test_get_aws_region_caching(mock_boto_session):
+ """Test that get_aws_region caches the result from Boto3."""
+ mock_session_instance = mock_boto_session.return_value
+ mock_session_instance.region_name = "us-east-1"
+
+ supplier = snippets.CustomAwsSupplier()
+
+ # First call should hit the session
+ region = supplier.get_aws_region(None, None)
+ assert region == "us-east-1"
+
+ # Change the mock to ensure we aren't calling it again
+ mock_session_instance.region_name = "us-west-2"
+
+ # Second call should return the cached value
+ region2 = supplier.get_aws_region(None, None)
+ assert region2 == "us-east-1"
+
+
+@mock.patch("boto3.Session")
+def test_get_aws_security_credentials_success(mock_boto_session):
+ """Test successful retrieval of AWS credentials."""
+ mock_session_instance = mock_boto_session.return_value
+
+ mock_creds = mock.MagicMock()
+ mock_creds.access_key = "test-key"
+ mock_creds.secret_key = "test-secret"
+ mock_creds.token = "test-token"
+ mock_session_instance.get_credentials.return_value = mock_creds
+
+ supplier = snippets.CustomAwsSupplier()
+ creds = supplier.get_aws_security_credentials(None)
+
+ assert creds.access_key_id == "test-key"
+ assert creds.secret_access_key == "test-secret"
+ assert creds.session_token == "test-token"
+
+
+@mock.patch("snippets.auth_requests.AuthorizedSession")
+@mock.patch("snippets.aws.Credentials")
+@mock.patch("snippets.CustomAwsSupplier")
+def test_authenticate_unit_success(MockSupplier, MockAwsCreds, MockSession):
+ """Unit test for the main flow using mocks."""
+ mock_response = mock.MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = {"name": "my-bucket"}
+
+ mock_session_instance = MockSession.return_value
+ mock_session_instance.get.return_value = mock_response
+
+ result = snippets.authenticate_with_aws_credentials(
+ bucket_name="my-bucket",
+ audience="//iam.googleapis.com/...",
+ impersonation_url=None,
+ )
+
+ assert result == {"name": "my-bucket"}
+ MockSupplier.assert_called_once()
+ MockAwsCreds.assert_called_once()
+
+
+# --- System Test (Integration) ---
+
+
+def test_authenticate_system():
+ """
+ System test that runs against the real API.
+ Skips automatically if custom-credentials-aws-secrets.json is missing or incomplete.
+ """
+ if not os.path.exists("custom-credentials-aws-secrets.json"):
+ pytest.skip(
+ "Skipping system test: custom-credentials-aws-secrets.json not found."
+ )
+
+ with open("custom-credentials-aws-secrets.json", "r") as f:
+ secrets = json.load(f)
+
+ required_keys = [
+ "gcp_workload_audience",
+ "gcs_bucket_name",
+ "aws_access_key_id",
+ "aws_secret_access_key",
+ "aws_region",
+ ]
+ if not all(key in secrets and secrets[key] for key in required_keys):
+ pytest.skip(
+ "Skipping system test: custom-credentials-aws-secrets.json is missing or has empty required keys."
+ )
+
+ metadata = snippets.main()
+
+ # Verify that the returned metadata is a dictionary with expected keys.
+ assert isinstance(metadata, dict)
+ assert "name" in metadata
+ assert metadata["name"] == secrets["gcs_bucket_name"]
diff --git a/auth/custom-credentials/okta/README.md b/auth/custom-credentials/okta/README.md
new file mode 100644
index 00000000000..96d444e85a4
--- /dev/null
+++ b/auth/custom-credentials/okta/README.md
@@ -0,0 +1,83 @@
+# Running the Custom Okta Credential Supplier Sample
+
+This sample demonstrates how to use a custom subject token supplier to authenticate with Google Cloud using Okta as an external identity provider. It uses the Client Credentials flow for machine-to-machine (M2M) authentication.
+
+## Prerequisites
+
+* An Okta developer account.
+* A Google Cloud project with the IAM API enabled.
+* A Google Cloud Storage bucket. Ensure that the authenticated user has access to this bucket.
+* Python 3.10 or later installed.
+
+## Okta Configuration
+
+Before running the sample, you need to configure an Okta application for Machine-to-Machine (M2M) communication.
+
+### Create an M2M Application in Okta
+
+1. Log in to your Okta developer console.
+2. Navigate to **Applications** > **Applications** and click **Create App Integration**.
+3. Select **API Services** as the sign-on method and click **Next**.
+4. Give your application a name and click **Save**.
+
+### Obtain Okta Credentials
+
+Once the application is created, you will find the following information in the **General** tab:
+
+* **Okta Domain**: Your Okta developer domain (e.g., `https://dev-123456.okta.com`).
+* **Client ID**: The client ID for your application.
+* **Client Secret**: The client secret for your application.
+
+You will need these values to configure the sample.
+
+## Google Cloud Configuration
+
+You need to configure a Workload Identity Pool in Google Cloud to trust the Okta application.
+
+### Set up Workload Identity Federation
+
+1. In the Google Cloud Console, navigate to **IAM & Admin** > **Workload Identity Federation**.
+2. Click **Create Pool** to create a new Workload Identity Pool.
+3. Add a new **OIDC provider** to the pool.
+4. Configure the provider with your Okta domain as the issuer URL.
+5. Map the Okta `sub` (subject) assertion to a GCP principal.
+
+For detailed instructions, refer to the [Workload Identity Federation documentation](https://cloud.google.com/iam/docs/workload-identity-federation).
+
+## Running the Script
+
+To run the sample on your local system, you need to install the dependencies and configure your credentials.
+
+### Install Dependencies
+
+```bash
+pip install -r requirements.txt
+```
+
+### Configure Credentials
+
+1. Copy the example secrets file to a new file named `custom-credentials-okta-secrets.json`:
+ ```bash
+ cp custom-credentials-okta-secrets.json.example custom-credentials-okta-secrets.json
+ ```
+2. Open `custom-credentials-okta-secrets.json` and fill in the following values:
+
+ * `okta_domain`: Your Okta developer domain (for example `https://dev-123456.okta.com`).
+ * `okta_client_id`: The client ID for your application.
+ * `okta_client_secret`: The client secret for your application.
+ * `gcp_workload_audience`: The audience for the Google Cloud Workload Identity Pool. This is the full identifier of the Workload Identity Pool provider.
+ * `gcs_bucket_name`: The name of the Google Cloud Storage bucket to access.
+ * `gcp_service_account_impersonation_url`: (Optional) The URL for service account impersonation.
+
+
+### Run the Application
+
+```bash
+python3 snippets.py
+```
+
+The script authenticates with Okta to get an OIDC token, exchanges that token for a Google Cloud federated token, and uses it to list metadata for the specified Google Cloud Storage bucket.
+
+## Testing
+
+This sample is not continuously tested. It is provided for instructional purposes and may require modifications to work in your environment.
diff --git a/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example b/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example
new file mode 100644
index 00000000000..fa04fda7cb2
--- /dev/null
+++ b/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example
@@ -0,0 +1,8 @@
+{
+ "okta_domain": "https://your-okta-domain.okta.com",
+ "okta_client_id": "your-okta-client-id",
+ "okta_client_secret": "your-okta-client-secret",
+ "gcp_workload_audience": "//iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider",
+ "gcs_bucket_name": "your-gcs-bucket-name",
+ "gcp_service_account_impersonation_url": "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken"
+}
diff --git a/appengine/flexible_python37_and_earlier/metadata/app.yaml b/auth/custom-credentials/okta/noxfile_config.py
similarity index 79%
rename from appengine/flexible_python37_and_earlier/metadata/app.yaml
rename to auth/custom-credentials/okta/noxfile_config.py
index ca76f83fc3b..0ed973689f7 100644
--- a/appengine/flexible_python37_and_earlier/metadata/app.yaml
+++ b/auth/custom-credentials/okta/noxfile_config.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,9 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
+TEST_CONFIG_OVERRIDE = {
+ "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"],
+}
diff --git a/auth/custom-credentials/okta/requirements-test.txt b/auth/custom-credentials/okta/requirements-test.txt
new file mode 100644
index 00000000000..f47609d2651
--- /dev/null
+++ b/auth/custom-credentials/okta/requirements-test.txt
@@ -0,0 +1,2 @@
+-r requirements.txt
+pytest==7.1.2
diff --git a/auth/custom-credentials/okta/requirements.txt b/auth/custom-credentials/okta/requirements.txt
new file mode 100644
index 00000000000..d9669ebee9f
--- /dev/null
+++ b/auth/custom-credentials/okta/requirements.txt
@@ -0,0 +1,4 @@
+requests==2.32.3
+google-cloud-storage==2.19.0
+google-auth==2.43.0
+python-dotenv==1.1.1
diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py
new file mode 100644
index 00000000000..02af2dadc93
--- /dev/null
+++ b/auth/custom-credentials/okta/snippets.py
@@ -0,0 +1,138 @@
+# Copyright 2025 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START auth_custom_credential_supplier_okta]
+import json
+import time
+import urllib.parse
+
+from google.auth import identity_pool
+from google.cloud import storage
+import requests
+
+
+class OktaClientCredentialsSupplier:
+ """A custom SubjectTokenSupplier that authenticates with Okta.
+
+ This supplier uses the Client Credentials grant flow for machine-to-machine
+ (M2M) authentication with Okta.
+ """
+
+ def __init__(self, domain, client_id, client_secret):
+ self.okta_token_url = f"{domain.rstrip('/')}/oauth2/default/v1/token"
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.access_token = None
+ self.expiry_time = 0
+
+ def get_subject_token(self, context, request=None) -> str:
+ """Fetches a new token if the current one is expired or missing."""
+ if self.access_token and time.time() < self.expiry_time - 60:
+ return self.access_token
+ self._fetch_okta_access_token()
+ return self.access_token
+
+ def _fetch_okta_access_token(self):
+ """Performs the Client Credentials grant flow with Okta."""
+ headers = {
+ "Content-Type": "application/x-www-form-urlencoded",
+ "Accept": "application/json",
+ }
+ data = {
+ "grant_type": "client_credentials",
+ "scope": "gcp.test.read", # Set scope as per Okta app config.
+ }
+
+ response = requests.post(
+ self.okta_token_url,
+ headers=headers,
+ data=urllib.parse.urlencode(data),
+ auth=(self.client_id, self.client_secret),
+ )
+ response.raise_for_status()
+
+ token_data = response.json()
+ self.access_token = token_data["access_token"]
+ self.expiry_time = time.time() + token_data["expires_in"]
+
+
+def authenticate_with_okta_credentials(
+ bucket_name, audience, domain, client_id, client_secret, impersonation_url=None
+):
+ """Authenticates using the custom Okta supplier and gets bucket metadata.
+
+ Returns:
+ dict: The bucket metadata response from the Google Cloud Storage API.
+ """
+
+ okta_supplier = OktaClientCredentialsSupplier(domain, client_id, client_secret)
+
+ credentials = identity_pool.Credentials(
+ audience=audience,
+ subject_token_type="urn:ietf:params:oauth:token-type:jwt",
+ token_url="https://sts.googleapis.com/v1/token",
+ subject_token_supplier=okta_supplier,
+ default_scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
+ service_account_impersonation_url=impersonation_url,
+ )
+
+ storage_client = storage.Client(credentials=credentials)
+
+ bucket = storage_client.get_bucket(bucket_name)
+
+ return bucket._properties
+
+
+# [END auth_custom_credential_supplier_okta]
+
+
+def main():
+ try:
+ with open("custom-credentials-okta-secrets.json") as f:
+ secrets = json.load(f)
+ except FileNotFoundError:
+ print("Could not find custom-credentials-okta-secrets.json.")
+ return
+
+ gcp_audience = secrets.get("gcp_workload_audience")
+ gcs_bucket_name = secrets.get("gcs_bucket_name")
+ sa_impersonation_url = secrets.get("gcp_service_account_impersonation_url")
+
+ okta_domain = secrets.get("okta_domain")
+ okta_client_id = secrets.get("okta_client_id")
+ okta_client_secret = secrets.get("okta_client_secret")
+
+ if not all(
+ [gcp_audience, gcs_bucket_name, okta_domain, okta_client_id, okta_client_secret]
+ ):
+ print("Missing required values in secrets.json.")
+ return
+
+ try:
+ print(f"Retrieving metadata for bucket: {gcs_bucket_name}...")
+ metadata = authenticate_with_okta_credentials(
+ bucket_name=gcs_bucket_name,
+ audience=gcp_audience,
+ domain=okta_domain,
+ client_id=okta_client_id,
+ client_secret=okta_client_secret,
+ impersonation_url=sa_impersonation_url,
+ )
+ print("--- SUCCESS! ---")
+ print(json.dumps(metadata, indent=2))
+ except Exception as e:
+ print(f"Authentication or Request failed: {e}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/auth/custom-credentials/okta/snippets_test.py b/auth/custom-credentials/okta/snippets_test.py
new file mode 100644
index 00000000000..1f05c4ad7bf
--- /dev/null
+++ b/auth/custom-credentials/okta/snippets_test.py
@@ -0,0 +1,134 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import time
+from unittest import mock
+import urllib.parse
+
+import pytest
+
+import snippets
+
+# --- Unit Tests ---
+
+
+def test_init_url_cleaning():
+ """Test that the token URL strips trailing slashes."""
+ s1 = snippets.OktaClientCredentialsSupplier("https://okta.com/", "id", "sec")
+ assert s1.okta_token_url == "https://okta.com/oauth2/default/v1/token"
+
+ s2 = snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec")
+ assert s2.okta_token_url == "https://okta.com/oauth2/default/v1/token"
+
+
+@mock.patch("requests.post")
+def test_get_subject_token_fetch(mock_post):
+ """Test fetching a new token from Okta."""
+ supplier = snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec")
+
+ mock_response = mock.MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = {"access_token": "new-token", "expires_in": 3600}
+ mock_post.return_value = mock_response
+
+ token = supplier.get_subject_token(None, None)
+
+ assert token == "new-token"
+ mock_post.assert_called_once()
+
+ # Verify args
+ _, kwargs = mock_post.call_args
+ assert kwargs["auth"] == ("id", "sec")
+
+ sent_data = urllib.parse.parse_qs(kwargs["data"])
+ assert sent_data["grant_type"][0] == "client_credentials"
+
+
+@mock.patch("requests.post")
+def test_get_subject_token_cached(mock_post):
+ """Test that cached token is returned if valid."""
+ supplier = snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec")
+ supplier.access_token = "cached-token"
+ supplier.expiry_time = time.time() + 3600
+
+ token = supplier.get_subject_token(None, None)
+
+ assert token == "cached-token"
+ mock_post.assert_not_called()
+
+
+@mock.patch("snippets.auth_requests.AuthorizedSession")
+@mock.patch("snippets.identity_pool.Credentials")
+@mock.patch("snippets.OktaClientCredentialsSupplier")
+def test_authenticate_unit_success(MockSupplier, MockCreds, MockSession):
+ """Unit test for the main Okta auth flow."""
+ mock_response = mock.MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = {"name": "test-bucket"}
+
+ mock_session_instance = MockSession.return_value
+ mock_session_instance.get.return_value = mock_response
+
+ metadata = snippets.authenticate_with_okta_credentials(
+ bucket_name="test-bucket",
+ audience="test-aud",
+ domain="https://okta.com",
+ client_id="id",
+ client_secret="sec",
+ impersonation_url=None,
+ )
+
+ assert metadata == {"name": "test-bucket"}
+ MockSupplier.assert_called_once()
+ MockCreds.assert_called_once()
+
+
+# --- System Test ---
+
+
+def test_authenticate_system():
+ """
+ System test that runs against the real API.
+ Skips automatically if custom-credentials-okta-secrets.json is missing or incomplete.
+ """
+ if not os.path.exists("custom-credentials-okta-secrets.json"):
+ pytest.skip(
+ "Skipping system test: custom-credentials-okta-secrets.json not found."
+ )
+
+ with open("custom-credentials-okta-secrets.json", "r") as f:
+ secrets = json.load(f)
+
+ required_keys = [
+ "gcp_workload_audience",
+ "gcs_bucket_name",
+ "okta_domain",
+ "okta_client_id",
+ "okta_client_secret",
+ ]
+ if not all(key in secrets for key in required_keys):
+ pytest.skip(
+ "Skipping system test: custom-credentials-okta-secrets.json is missing required keys."
+ )
+
+ # The main() function handles the auth flow and printing.
+ # We mock the print function to verify the output.
+ with mock.patch("builtins.print") as mock_print:
+ snippets.main()
+
+ # Check for the success message in the print output.
+ output = "\n".join([call.args[0] for call in mock_print.call_args_list])
+ assert "--- SUCCESS! ---" in output
diff --git a/bigquery-datatransfer/snippets/conftest.py b/bigquery-datatransfer/snippets/conftest.py
index 1248a9407f7..30dd52f3ce6 100644
--- a/bigquery-datatransfer/snippets/conftest.py
+++ b/bigquery-datatransfer/snippets/conftest.py
@@ -123,7 +123,7 @@ def transfer_client(default_credentials, project_id):
@pytest.fixture(scope="session")
def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name):
- from . import manage_transfer_configs, scheduled_query
+ from . import scheduled_query
# Use the transfer_client fixture so we know quota is attributed to the
# correct project.
@@ -140,9 +140,10 @@ def transfer_config_name(transfer_client, project_id, dataset_id, service_accoun
}
)
yield transfer_config.name
- manage_transfer_configs.delete_config(
- {"transfer_config_name": transfer_config.name}
- )
+ try:
+ transfer_client.delete_transfer_config(name=transfer_config.name)
+ except google.api_core.exceptions.NotFound:
+ pass
@pytest.fixture
diff --git a/bigquery-datatransfer/snippets/manage_transfer_configs.py b/bigquery-datatransfer/snippets/manage_transfer_configs.py
index cd865455c10..726b4caf8f2 100644
--- a/bigquery-datatransfer/snippets/manage_transfer_configs.py
+++ b/bigquery-datatransfer/snippets/manage_transfer_configs.py
@@ -13,62 +13,6 @@
# limitations under the License.
-def list_configs(override_values={}):
- # [START bigquerydatatransfer_list_configs]
- from google.cloud import bigquery_datatransfer
-
- transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
- project_id = "my-project"
- # [END bigquerydatatransfer_list_configs]
- # To facilitate testing, we replace values with alternatives
- # provided by the testing harness.
- project_id = override_values.get("project_id", project_id)
- # [START bigquerydatatransfer_list_configs]
- parent = transfer_client.common_project_path(project_id)
-
- configs = transfer_client.list_transfer_configs(parent=parent)
- print("Got the following configs:")
- for config in configs:
- print(f"\tID: {config.name}, Schedule: {config.schedule}")
- # [END bigquerydatatransfer_list_configs]
-
-
-def update_config(override_values={}):
- # [START bigquerydatatransfer_update_config]
- from google.cloud import bigquery_datatransfer
- from google.protobuf import field_mask_pb2
-
- transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
- transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
- new_display_name = "My Transfer Config"
- # [END bigquerydatatransfer_update_config]
- # To facilitate testing, we replace values with alternatives
- # provided by the testing harness.
- new_display_name = override_values.get("new_display_name", new_display_name)
- transfer_config_name = override_values.get(
- "transfer_config_name", transfer_config_name
- )
- # [START bigquerydatatransfer_update_config]
-
- transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name)
- transfer_config.display_name = new_display_name
-
- transfer_config = transfer_client.update_transfer_config(
- {
- "transfer_config": transfer_config,
- "update_mask": field_mask_pb2.FieldMask(paths=["display_name"]),
- }
- )
-
- print(f"Updated config: '{transfer_config.name}'")
- print(f"New display name: '{transfer_config.display_name}'")
- # [END bigquerydatatransfer_update_config]
- # Return the config name for testing purposes, so that it can be deleted.
- return transfer_config
-
-
def update_credentials_with_service_account(override_values={}):
# [START bigquerydatatransfer_update_credentials]
from google.cloud import bigquery_datatransfer
@@ -159,27 +103,3 @@ def schedule_backfill_manual_transfer(override_values={}):
print(f"backfill: {run.run_time} run: {run.name}")
# [END bigquerydatatransfer_schedule_backfill]
return response.runs
-
-
-def delete_config(override_values={}):
- # [START bigquerydatatransfer_delete_transfer]
- import google.api_core.exceptions
- from google.cloud import bigquery_datatransfer
-
- transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
- transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
- # [END bigquerydatatransfer_delete_transfer]
- # To facilitate testing, we replace values with alternatives
- # provided by the testing harness.
- transfer_config_name = override_values.get(
- "transfer_config_name", transfer_config_name
- )
- # [START bigquerydatatransfer_delete_transfer]
- try:
- transfer_client.delete_transfer_config(name=transfer_config_name)
- except google.api_core.exceptions.NotFound:
- print("Transfer config not found.")
- else:
- print(f"Deleted transfer config: {transfer_config_name}")
- # [END bigquerydatatransfer_delete_transfer]
diff --git a/bigquery-datatransfer/snippets/manage_transfer_configs_test.py b/bigquery-datatransfer/snippets/manage_transfer_configs_test.py
index 5504f19cbf9..505c61d269c 100644
--- a/bigquery-datatransfer/snippets/manage_transfer_configs_test.py
+++ b/bigquery-datatransfer/snippets/manage_transfer_configs_test.py
@@ -15,26 +15,6 @@
from . import manage_transfer_configs
-def test_list_configs(capsys, project_id, transfer_config_name):
- manage_transfer_configs.list_configs({"project_id": project_id})
- out, _ = capsys.readouterr()
- assert "Got the following configs:" in out
- assert transfer_config_name in out
-
-
-def test_update_config(capsys, transfer_config_name):
- manage_transfer_configs.update_config(
- {
- "new_display_name": "name from test_update_config",
- "transfer_config_name": transfer_config_name,
- }
- )
- out, _ = capsys.readouterr()
- assert "Updated config:" in out
- assert transfer_config_name in out
- assert "name from test_update_config" in out
-
-
def test_update_credentials_with_service_account(
capsys, project_id, service_account_name, transfer_config_name
):
@@ -60,9 +40,3 @@ def test_schedule_backfill_manual_transfer(capsys, transfer_config_name):
assert transfer_config_name in out
# Check that there are three runs for between 2 and 5 days ago.
assert len(runs) == 3
-
-
-def test_delete_config(capsys, transfer_config_name):
- # transfer_config_name fixture in conftest.py calls the delete config
- # sample. To conserve limited BQ-DTS quota we only make basic checks.
- assert len(transfer_config_name) != 0
diff --git a/bigquery-migration/snippets/requirements.txt b/bigquery-migration/snippets/requirements.txt
index 2d38587c2e9..767450fe41a 100644
--- a/bigquery-migration/snippets/requirements.txt
+++ b/bigquery-migration/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-bigquery-migration==0.11.14
+google-cloud-bigquery-migration==0.11.15
diff --git a/bigquery/continuous-queries/requirements.txt b/bigquery/continuous-queries/requirements.txt
index e21b7f4683c..244b3dea27d 100644
--- a/bigquery/continuous-queries/requirements.txt
+++ b/bigquery/continuous-queries/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-bigquery==3.30.0
google-auth==2.38.0
requests==2.32.4
diff --git a/bigquery/pandas-gbq-migration/requirements.txt b/bigquery/pandas-gbq-migration/requirements.txt
index 00692744ede..2e8f1a6e66d 100644
--- a/bigquery/pandas-gbq-migration/requirements.txt
+++ b/bigquery/pandas-gbq-migration/requirements.txt
@@ -3,6 +3,7 @@ google-cloud-bigquery-storage==2.27.0
pandas==2.0.3; python_version == '3.8'
pandas==2.2.3; python_version > '3.8'
pandas-gbq==0.24.0
-grpcio==1.69.0
+grpcio==1.70.0; python_version == '3.8'
+grpcio==1.74.0; python_version > '3.8'
pyarrow==17.0.0; python_version <= '3.8'
pyarrow==20.0.0; python_version > '3.9'
diff --git a/appengine/flexible_python37_and_earlier/tasks/app.yaml b/bigquery/python-db-dtypes-pandas/__init__.py
similarity index 91%
rename from appengine/flexible_python37_and_earlier/tasks/app.yaml
rename to bigquery/python-db-dtypes-pandas/__init__.py
index 15ac0d97205..7e1ec16ec8c 100644
--- a/appengine/flexible_python37_and_earlier/tasks/app.yaml
+++ b/bigquery/python-db-dtypes-pandas/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2019 Google LLC.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,5 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-runtime: python37
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/__init__.py b/bigquery/python-db-dtypes-pandas/pytest.ini
similarity index 100%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/mysite/__init__.py
rename to bigquery/python-db-dtypes-pandas/pytest.ini
diff --git a/.github/flakybot.yaml b/bigquery/python-db-dtypes-pandas/snippets/__init__.py
similarity index 83%
rename from .github/flakybot.yaml
rename to bigquery/python-db-dtypes-pandas/snippets/__init__.py
index 55543bcd50c..7e1ec16ec8c 100644
--- a/.github/flakybot.yaml
+++ b/bigquery/python-db-dtypes-pandas/snippets/__init__.py
@@ -1,15 +1,13 @@
-# Copyright 2023 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-issuePriority: p2
\ No newline at end of file
diff --git a/appengine/flexible_python37_and_earlier/analytics/noxfile_config.py b/bigquery/python-db-dtypes-pandas/snippets/noxconfig.py
similarity index 83%
rename from appengine/flexible_python37_and_earlier/analytics/noxfile_config.py
rename to bigquery/python-db-dtypes-pandas/snippets/noxconfig.py
index 1665dd736f8..b9d835eefee 100644
--- a/appengine/flexible_python37_and_earlier/analytics/noxfile_config.py
+++ b/bigquery/python-db-dtypes-pandas/snippets/noxconfig.py
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,17 +22,20 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7", "3.7", "3.8"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- "enforce_type_hints": False,
+ "enforce_type_hints": True,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {},
diff --git a/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time.py b/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time.py
new file mode 100644
index 00000000000..b6e55813064
--- /dev/null
+++ b/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time.py
@@ -0,0 +1,79 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def pandas_date_and_time():
+ # [START bigquery_pandas_date_create]
+
+ import datetime
+
+ import pandas as pd
+
+ import db_dtypes # noqa import to register dtypes
+
+ dates = pd.Series([datetime.date(2021, 9, 17), "2021-9-18"], dtype="dbdate")
+
+ # [END bigquery_pandas_date_create]
+ # [START bigquery_pandas_date_as_datetime]
+
+ datetimes = dates.astype("datetime64")
+
+ # [END bigquery_pandas_date_as_datetime]
+ # [START bigquery_pandas_date_sub]
+
+ dates2 = pd.Series(["2021-1-1", "2021-1-2"], dtype="dbdate")
+ diffs = dates - dates2
+
+ # [END bigquery_pandas_date_sub]
+ # [START bigquery_pandas_date_add_offset]
+
+ do = pd.DateOffset(days=1)
+ after = dates + do
+ before = dates - do
+
+ # [END bigquery_pandas_date_add_offset]
+ # [START bigquery_pandas_time_create]
+
+ times = pd.Series([datetime.time(1, 2, 3, 456789), "12:00:00.6"], dtype="dbtime")
+
+ # [END bigquery_pandas_time_create]
+ # [START bigquery_pandas_time_as_timedelta]
+
+ timedeltas = times.astype("timedelta64")
+
+ # [END bigquery_pandas_time_as_timedelta]
+
+ # Combine datetime64 and timedelta64 to confirm adding dates and times are
+ # equivalent.
+ combined0 = datetimes + timedeltas
+
+ # [START bigquery_pandas_combine_date_time]
+
+ combined = dates + times
+
+ # [END bigquery_pandas_combine_date_time]
+
+ return (
+ dates,
+ datetimes,
+ dates2,
+ diffs,
+ do,
+ after,
+ before,
+ times,
+ timedeltas,
+ combined,
+ combined0,
+ )
diff --git a/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time_test.py b/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time_test.py
new file mode 100644
index 00000000000..56641765c30
--- /dev/null
+++ b/bigquery/python-db-dtypes-pandas/snippets/pandas_date_and_time_test.py
@@ -0,0 +1,60 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import numpy as np
+from pandas import Timestamp
+
+
+def test_pandas_date_and_time():
+ from .pandas_date_and_time import pandas_date_and_time
+
+ (
+ dates,
+ _,
+ dates2,
+ diffs,
+ do,
+ after,
+ before,
+ times,
+ _,
+ combined,
+ combined0,
+ ) = pandas_date_and_time()
+
+ assert str(dates.dtype) == "dbdate"
+ assert list(dates) == [datetime.date(2021, 9, 17), datetime.date(2021, 9, 18)]
+
+ assert np.array_equal(
+ diffs,
+ dates.astype("datetime64") - dates2.astype("datetime64"),
+ )
+
+ assert np.array_equal(after, dates.astype("object") + do)
+ assert np.array_equal(before, dates.astype("object") - do)
+
+ assert str(times.dtype) == "dbtime"
+ assert list(times) == [
+ datetime.time(1, 2, 3, 456789),
+ datetime.time(12, 0, 0, 600000),
+ ]
+
+ for c in combined0, combined:
+ assert str(c.dtype) == "datetime64[ns]"
+ assert list(c) == [
+ Timestamp("2021-09-17 01:02:03.456789"),
+ Timestamp("2021-09-18 12:00:00.600000"),
+ ]
diff --git a/bigquery/python-db-dtypes-pandas/snippets/requirements-test.txt b/bigquery/python-db-dtypes-pandas/snippets/requirements-test.txt
new file mode 100644
index 00000000000..9471b3d92fb
--- /dev/null
+++ b/bigquery/python-db-dtypes-pandas/snippets/requirements-test.txt
@@ -0,0 +1 @@
+pytest==8.4.2
diff --git a/bigquery/python-db-dtypes-pandas/snippets/requirements.txt b/bigquery/python-db-dtypes-pandas/snippets/requirements.txt
new file mode 100644
index 00000000000..5a18bf31224
--- /dev/null
+++ b/bigquery/python-db-dtypes-pandas/snippets/requirements.txt
@@ -0,0 +1,4 @@
+db-dtypes
+numpy
+pandas
+pyarrow
diff --git a/bigquery/remote-function/document/requirements-test.txt b/bigquery/remote-function/document/requirements-test.txt
index abfacf9940c..254febb7aba 100644
--- a/bigquery/remote-function/document/requirements-test.txt
+++ b/bigquery/remote-function/document/requirements-test.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-documentai==3.0.1
pytest==8.2.0
diff --git a/bigquery/remote-function/document/requirements.txt b/bigquery/remote-function/document/requirements.txt
index 262e1f0b6a2..5d039df280e 100644
--- a/bigquery/remote-function/document/requirements.txt
+++ b/bigquery/remote-function/document/requirements.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-documentai==3.0.1
Werkzeug==2.3.8
diff --git a/bigquery/remote-function/translate/requirements-test.txt b/bigquery/remote-function/translate/requirements-test.txt
index 74c88279a29..2048a36731f 100644
--- a/bigquery/remote-function/translate/requirements-test.txt
+++ b/bigquery/remote-function/translate/requirements-test.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-translate==3.18.0
pytest==8.2.0
diff --git a/bigquery/remote-function/translate/requirements.txt b/bigquery/remote-function/translate/requirements.txt
index dc8662d5ab6..8f3760f3846 100644
--- a/bigquery/remote-function/translate/requirements.txt
+++ b/bigquery/remote-function/translate/requirements.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-translate==3.18.0
Werkzeug==2.3.8
diff --git a/bigquery/remote-function/vision/requirements-test.txt b/bigquery/remote-function/vision/requirements-test.txt
index fd0200a49dd..62634fcffc0 100644
--- a/bigquery/remote-function/vision/requirements-test.txt
+++ b/bigquery/remote-function/vision/requirements-test.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-vision==3.8.1
pytest==8.2.0
diff --git a/bigquery/remote-function/vision/requirements.txt b/bigquery/remote-function/vision/requirements.txt
index fc87b4eaa5f..6737756c476 100644
--- a/bigquery/remote-function/vision/requirements.txt
+++ b/bigquery/remote-function/vision/requirements.txt
@@ -1,4 +1,4 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-vision==3.8.1
Werkzeug==2.3.8
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/__init__.py b/bigquery_storage/__init__.py
similarity index 100%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/polls/__init__.py
rename to bigquery_storage/__init__.py
diff --git a/bigquery_storage/conftest.py b/bigquery_storage/conftest.py
new file mode 100644
index 00000000000..63d53531471
--- /dev/null
+++ b/bigquery_storage/conftest.py
@@ -0,0 +1,46 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import os
+import random
+from typing import Generator
+
+from google.cloud import bigquery
+
+import pytest
+
+
+@pytest.fixture(scope="session")
+def project_id() -> str:
+ return os.environ["GOOGLE_CLOUD_PROJECT"]
+
+
+@pytest.fixture(scope="session")
+def dataset(project_id: str) -> Generator[bigquery.Dataset, None, None]:
+ client = bigquery.Client()
+
+ # Add a random suffix to dataset name to avoid conflict, because we run
+ # a samples test on each supported Python version almost at the same time.
+ dataset_time = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
+ suffix = f"_{(random.randint(0, 99)):02d}"
+ dataset_name = "samples_tests_" + dataset_time + suffix
+
+ dataset_id = "{}.{}".format(project_id, dataset_name)
+ dataset = bigquery.Dataset(dataset_id)
+ dataset.location = "us-east7"
+ created_dataset = client.create_dataset(dataset)
+ yield created_dataset
+
+ client.delete_dataset(created_dataset, delete_contents=True)
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/apps.py b/bigquery_storage/pyarrow/__init__.py
similarity index 75%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/polls/apps.py
rename to bigquery_storage/pyarrow/__init__.py
index 88bdacda7c7..a2a70562f48 100644
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/apps.py
+++ b/bigquery_storage/pyarrow/__init__.py
@@ -1,19 +1,15 @@
-# Copyright 2015 Google LLC.
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from django.apps import AppConfig
-
-
-class PollsConfig(AppConfig):
- name = "polls"
diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py
new file mode 100644
index 00000000000..78cb0a57573
--- /dev/null
+++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py
@@ -0,0 +1,224 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from concurrent.futures import Future
+import datetime
+import decimal
+from typing import Iterable
+
+from google.cloud import bigquery
+from google.cloud import bigquery_storage_v1
+from google.cloud.bigquery import enums
+from google.cloud.bigquery_storage_v1 import types as gapic_types
+from google.cloud.bigquery_storage_v1.writer import AppendRowsStream
+import pandas as pd
+import pyarrow as pa
+
+
+TABLE_LENGTH = 100_000
+
+BQ_SCHEMA = [
+ bigquery.SchemaField("bool_col", enums.SqlTypeNames.BOOLEAN),
+ bigquery.SchemaField("int64_col", enums.SqlTypeNames.INT64),
+ bigquery.SchemaField("float64_col", enums.SqlTypeNames.FLOAT64),
+ bigquery.SchemaField("numeric_col", enums.SqlTypeNames.NUMERIC),
+ bigquery.SchemaField("bignumeric_col", enums.SqlTypeNames.BIGNUMERIC),
+ bigquery.SchemaField("string_col", enums.SqlTypeNames.STRING),
+ bigquery.SchemaField("bytes_col", enums.SqlTypeNames.BYTES),
+ bigquery.SchemaField("date_col", enums.SqlTypeNames.DATE),
+ bigquery.SchemaField("datetime_col", enums.SqlTypeNames.DATETIME),
+ bigquery.SchemaField("time_col", enums.SqlTypeNames.TIME),
+ bigquery.SchemaField("timestamp_col", enums.SqlTypeNames.TIMESTAMP),
+ bigquery.SchemaField("geography_col", enums.SqlTypeNames.GEOGRAPHY),
+ bigquery.SchemaField(
+ "range_date_col", enums.SqlTypeNames.RANGE, range_element_type="DATE"
+ ),
+ bigquery.SchemaField(
+ "range_datetime_col",
+ enums.SqlTypeNames.RANGE,
+ range_element_type="DATETIME",
+ ),
+ bigquery.SchemaField(
+ "range_timestamp_col",
+ enums.SqlTypeNames.RANGE,
+ range_element_type="TIMESTAMP",
+ ),
+]
+
+PYARROW_SCHEMA = pa.schema(
+ [
+ pa.field("bool_col", pa.bool_()),
+ pa.field("int64_col", pa.int64()),
+ pa.field("float64_col", pa.float64()),
+ pa.field("numeric_col", pa.decimal128(38, scale=9)),
+ pa.field("bignumeric_col", pa.decimal256(76, scale=38)),
+ pa.field("string_col", pa.string()),
+ pa.field("bytes_col", pa.binary()),
+ pa.field("date_col", pa.date32()),
+ pa.field("datetime_col", pa.timestamp("us")),
+ pa.field("time_col", pa.time64("us")),
+ pa.field("timestamp_col", pa.timestamp("us")),
+ pa.field("geography_col", pa.string()),
+ pa.field(
+ "range_date_col",
+ pa.struct([("start", pa.date32()), ("end", pa.date32())]),
+ ),
+ pa.field(
+ "range_datetime_col",
+ pa.struct([("start", pa.timestamp("us")), ("end", pa.timestamp("us"))]),
+ ),
+ pa.field(
+ "range_timestamp_col",
+ pa.struct([("start", pa.timestamp("us")), ("end", pa.timestamp("us"))]),
+ ),
+ ]
+)
+
+
+def bqstorage_write_client() -> bigquery_storage_v1.BigQueryWriteClient:
+ return bigquery_storage_v1.BigQueryWriteClient()
+
+
+def make_table(project_id: str, dataset_id: str, bq_client: bigquery.Client) -> bigquery.Table:
+ table_id = "append_rows_w_arrow_test"
+ table_id_full = f"{project_id}.{dataset_id}.{table_id}"
+ bq_table = bigquery.Table(table_id_full, schema=BQ_SCHEMA)
+ created_table = bq_client.create_table(bq_table)
+
+ return created_table
+
+
+def create_stream(bqstorage_write_client: bigquery_storage_v1.BigQueryWriteClient, table: bigquery.Table) -> AppendRowsStream:
+ stream_name = f"projects/{table.project}/datasets/{table.dataset_id}/tables/{table.table_id}/_default"
+ request_template = gapic_types.AppendRowsRequest()
+ request_template.write_stream = stream_name
+
+ # Add schema to the template.
+ arrow_data = gapic_types.AppendRowsRequest.ArrowData()
+ arrow_data.writer_schema.serialized_schema = PYARROW_SCHEMA.serialize().to_pybytes()
+ request_template.arrow_rows = arrow_data
+
+ append_rows_stream = AppendRowsStream(
+ bqstorage_write_client,
+ request_template,
+ )
+ return append_rows_stream
+
+
+def generate_pyarrow_table(num_rows: int = TABLE_LENGTH) -> pa.Table:
+ date_1 = datetime.date(2020, 10, 1)
+ date_2 = datetime.date(2021, 10, 1)
+
+ datetime_1 = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456)
+ datetime_2 = datetime.datetime(2017, 12, 3, 14, 11, 27, 123456)
+
+ timestamp_1 = datetime.datetime(
+ 1999, 12, 31, 23, 59, 59, 999999, tzinfo=datetime.timezone.utc
+ )
+ timestamp_2 = datetime.datetime(
+ 2000, 12, 31, 23, 59, 59, 999999, tzinfo=datetime.timezone.utc
+ )
+
+ # Pandas Dataframe.
+ rows = []
+ for i in range(num_rows):
+ row = {
+ "bool_col": True,
+ "int64_col": i,
+ "float64_col": float(i),
+ "numeric_col": decimal.Decimal("0.000000001"),
+ "bignumeric_col": decimal.Decimal("0.1234567891"),
+ "string_col": "data as string",
+ "bytes_col": str.encode("data in bytes"),
+ "date_col": datetime.date(2019, 5, 10),
+ "datetime_col": datetime_1,
+ "time_col": datetime.time(23, 59, 59, 999999),
+ "timestamp_col": timestamp_1,
+ "geography_col": "POINT(-121 41)",
+ "range_date_col": {"start": date_1, "end": date_2},
+ "range_datetime_col": {"start": datetime_1, "end": datetime_2},
+ "range_timestamp_col": {"start": timestamp_1, "end": timestamp_2},
+ }
+ rows.append(row)
+ df = pd.DataFrame(rows)
+
+ # Dataframe to PyArrow Table.
+ table = pa.Table.from_pandas(df, schema=PYARROW_SCHEMA)
+
+ return table
+
+
+def generate_write_requests(
+ pyarrow_table: pa.Table,
+) -> Iterable[gapic_types.AppendRowsRequest]:
+ # Determine max_chunksize of the record batches. Because max size of
+ # AppendRowsRequest is 10 MB, we need to split the table if it's too big.
+ # See: https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#appendrowsrequest
+ max_request_bytes = 10 * 2**20 # 10 MB
+ chunk_num = int(pyarrow_table.nbytes / max_request_bytes) + 1
+ chunk_size = int(pyarrow_table.num_rows / chunk_num)
+
+ # Construct request(s).
+ for batch in pyarrow_table.to_batches(max_chunksize=chunk_size):
+ request = gapic_types.AppendRowsRequest()
+ request.arrow_rows.rows.serialized_record_batch = batch.serialize().to_pybytes()
+ yield request
+
+
+def verify_result(
+ client: bigquery.Client, table: bigquery.Table, futures: "list[Future]"
+) -> None:
+ bq_table = client.get_table(table)
+
+ # Verify table schema.
+ assert bq_table.schema == BQ_SCHEMA
+
+ # Verify table size.
+ query = client.query(f"SELECT COUNT(1) FROM `{bq_table}`;")
+ query_result = query.result().to_dataframe()
+
+ # There might be extra rows due to retries.
+ assert query_result.iloc[0, 0] >= TABLE_LENGTH
+
+ # Verify that table was split into multiple requests.
+ assert len(futures) == 2
+
+
+def main(project_id: str, dataset: bigquery.Dataset) -> None:
+ # Initialize clients.
+ write_client = bqstorage_write_client()
+ bq_client = bigquery.Client()
+
+ # Create BigQuery table.
+ bq_table = make_table(project_id, dataset.dataset_id, bq_client)
+
+ # Generate local PyArrow table.
+ pa_table = generate_pyarrow_table()
+
+ # Convert PyArrow table to Protobuf requests.
+ requests = generate_write_requests(pa_table)
+
+ # Create writing stream to the BigQuery table.
+ stream = create_stream(write_client, bq_table)
+
+ # Send requests.
+ futures = []
+ for request in requests:
+ future = stream.send(request)
+ futures.append(future)
+ future.result() # Optional, will block until writing is complete.
+
+ # Verify results.
+ verify_result(bq_client, bq_table, futures)
diff --git a/appengine/flexible_python37_and_earlier/hello_world/main_test.py b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py
similarity index 69%
rename from appengine/flexible_python37_and_earlier/hello_world/main_test.py
rename to bigquery_storage/pyarrow/append_rows_with_arrow_test.py
index a6049b094f9..f31de43b51f 100644
--- a/appengine/flexible_python37_and_earlier/hello_world/main_test.py
+++ b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py
@@ -1,4 +1,4 @@
-# Copyright 2015 Google LLC.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,13 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import main
+from google.cloud import bigquery
+from . import append_rows_with_arrow
-def test_index():
- main.app.testing = True
- client = main.app.test_client()
- r = client.get("/")
- assert r.status_code == 200
- assert "Hello World" in r.data.decode("utf-8")
+def test_append_rows_with_arrow(project_id: str, dataset: bigquery.Dataset) -> None:
+ append_rows_with_arrow.main(project_id, dataset)
diff --git a/bigquery_storage/pyarrow/noxfile_config.py b/bigquery_storage/pyarrow/noxfile_config.py
new file mode 100644
index 00000000000..29edb31ffe8
--- /dev/null
+++ b/bigquery_storage/pyarrow/noxfile_config.py
@@ -0,0 +1,42 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, then it will be imported from
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": ["2.7"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ "enforce_type_hints": True,
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
diff --git a/bigquery_storage/pyarrow/requirements-test.txt b/bigquery_storage/pyarrow/requirements-test.txt
new file mode 100644
index 00000000000..7561ed55ce2
--- /dev/null
+++ b/bigquery_storage/pyarrow/requirements-test.txt
@@ -0,0 +1,3 @@
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.1; python_version >= '3.9'
diff --git a/bigquery_storage/pyarrow/requirements.txt b/bigquery_storage/pyarrow/requirements.txt
new file mode 100644
index 00000000000..a593373b829
--- /dev/null
+++ b/bigquery_storage/pyarrow/requirements.txt
@@ -0,0 +1,5 @@
+db_dtypes
+google-cloud-bigquery
+google-cloud-bigquery-storage
+pandas
+pyarrow
diff --git a/bigquery_storage/quickstart/__init__.py b/bigquery_storage/quickstart/__init__.py
new file mode 100644
index 00000000000..a2a70562f48
--- /dev/null
+++ b/bigquery_storage/quickstart/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/appengine/flexible_python37_and_earlier/storage/noxfile_config.py b/bigquery_storage/quickstart/noxfile_config.py
similarity index 88%
rename from appengine/flexible_python37_and_earlier/storage/noxfile_config.py
rename to bigquery_storage/quickstart/noxfile_config.py
index 6c2c81fa22b..f1fa9e5618b 100644
--- a/appengine/flexible_python37_and_earlier/storage/noxfile_config.py
+++ b/bigquery_storage/quickstart/noxfile_config.py
@@ -22,8 +22,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
"enforce_type_hints": True,
@@ -39,5 +38,5 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- "envs": {"CLOUD_STORAGE_BUCKET": "python-docs-samples-tests-public"},
+ "envs": {},
}
diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py
new file mode 100644
index 00000000000..6f120ce9a58
--- /dev/null
+++ b/bigquery_storage/quickstart/quickstart.py
@@ -0,0 +1,95 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+
+def main(project_id: str = "your-project-id", snapshot_millis: int = 0) -> None:
+ # [START bigquerystorage_quickstart]
+ from google.cloud.bigquery_storage import BigQueryReadClient, types
+
+ # TODO(developer): Set the project_id variable.
+ # project_id = 'your-project-id'
+ #
+ # The read session is created in this project. This project can be
+ # different from that which contains the table.
+
+ client = BigQueryReadClient()
+
+ # This example reads baby name data from the public datasets.
+ table = "projects/{}/datasets/{}/tables/{}".format(
+ "bigquery-public-data", "usa_names", "usa_1910_current"
+ )
+
+ requested_session = types.ReadSession()
+ requested_session.table = table
+ # This API can also deliver data serialized in Apache Arrow format.
+ # This example leverages Apache Avro.
+ requested_session.data_format = types.DataFormat.AVRO
+
+ # We limit the output columns to a subset of those allowed in the table,
+ # and set a simple filter to only report names from the state of
+ # Washington (WA).
+ requested_session.read_options.selected_fields = ["name", "number", "state"]
+ requested_session.read_options.row_restriction = 'state = "WA"'
+
+ # Set a snapshot time if it's been specified.
+ if snapshot_millis > 0:
+ snapshot_time = types.Timestamp()
+ snapshot_time.FromMilliseconds(snapshot_millis)
+ requested_session.table_modifiers.snapshot_time = snapshot_time
+
+ parent = "projects/{}".format(project_id)
+ session = client.create_read_session(
+ parent=parent,
+ read_session=requested_session,
+ # We'll use only a single stream for reading data from the table. However,
+ # if you wanted to fan out multiple readers you could do so by having a
+ # reader process each individual stream.
+ max_stream_count=1,
+ )
+ reader = client.read_rows(session.streams[0].name)
+
+ # The read stream contains blocks of Avro-encoded bytes. The rows() method
+ # uses the fastavro library to parse these blocks as an iterable of Python
+ # dictionaries. Install fastavro with the following command:
+ #
+ # pip install google-cloud-bigquery-storage[fastavro]
+ rows = reader.rows(session)
+
+ # Do any local processing by iterating over the rows. The
+ # google-cloud-bigquery-storage client reconnects to the API after any
+ # transient network errors or timeouts.
+ names = set()
+ states = set()
+
+    # fastavro returns EOFError instead of StopIteration starting v1.8.4.
+ # See https://github.com/googleapis/python-bigquery-storage/pull/687
+ try:
+ for row in rows:
+ names.add(row["name"])
+ states.add(row["state"])
+ except EOFError:
+ pass
+
+ print("Got {} unique names in states: {}".format(len(names), ", ".join(states)))
+ # [END bigquerystorage_quickstart]
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("project_id")
+ parser.add_argument("--snapshot_millis", default=0, type=int)
+ args = parser.parse_args()
+ main(project_id=args.project_id, snapshot_millis=args.snapshot_millis)
diff --git a/bigquery_storage/quickstart/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py
new file mode 100644
index 00000000000..3380c923847
--- /dev/null
+++ b/bigquery_storage/quickstart/quickstart_test.py
@@ -0,0 +1,40 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import pytest
+
+from . import quickstart
+
+
+def now_millis() -> int:
+ return int(
+ (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
+ * 1000
+ )
+
+
+def test_quickstart_wo_snapshot(capsys: pytest.CaptureFixture, project_id: str) -> None:
+ quickstart.main(project_id)
+ out, _ = capsys.readouterr()
+ assert "unique names in states: WA" in out
+
+
+def test_quickstart_with_snapshot(
+ capsys: pytest.CaptureFixture, project_id: str
+) -> None:
+ quickstart.main(project_id, now_millis() - 5000)
+ out, _ = capsys.readouterr()
+ assert "unique names in states: WA" in out
diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt
new file mode 100644
index 00000000000..7561ed55ce2
--- /dev/null
+++ b/bigquery_storage/quickstart/requirements-test.txt
@@ -0,0 +1,3 @@
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.1; python_version >= '3.9'
diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt
new file mode 100644
index 00000000000..9d69822935d
--- /dev/null
+++ b/bigquery_storage/quickstart/requirements.txt
@@ -0,0 +1,3 @@
+fastavro
+google-cloud-bigquery
+google-cloud-bigquery-storage==2.32.0
diff --git a/appengine/flexible_python37_and_earlier/datastore/app.yaml b/bigquery_storage/snippets/__init__.py
similarity index 77%
rename from appengine/flexible_python37_and_earlier/datastore/app.yaml
rename to bigquery_storage/snippets/__init__.py
index ca76f83fc3b..0098709d195 100644
--- a/appengine/flexible_python37_and_earlier/datastore/app.yaml
+++ b/bigquery_storage/snippets/__init__.py
@@ -1,20 +1,15 @@
+# -*- coding: utf-8 -*-
+#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-runtime: python
-env: flex
-entrypoint: gunicorn -b :$PORT main:app
-
-runtime_config:
- python_version: 3
diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py
new file mode 100644
index 00000000000..3c34b472cde
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_pending.py
@@ -0,0 +1,132 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START bigquerystorage_append_rows_pending]
+"""
+This code sample demonstrates how to write records in pending mode
+using the low-level generated client for Python.
+"""
+
+from google.cloud import bigquery_storage_v1
+from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2
+
+# If you update the customer_record.proto protocol buffer definition, run:
+#
+# protoc --python_out=. customer_record.proto
+#
+# from the samples/snippets directory to generate the customer_record_pb2.py module.
+from . import customer_record_pb2
+
+
+def create_row_data(row_num: int, name: str) -> bytes:
+ row = customer_record_pb2.CustomerRecord()
+ row.row_num = row_num
+ row.customer_name = name
+ return row.SerializeToString()
+
+
+def append_rows_pending(project_id: str, dataset_id: str, table_id: str) -> None:
+ """Create a write stream, write some sample data, and commit the stream."""
+ write_client = bigquery_storage_v1.BigQueryWriteClient()
+ parent = write_client.table_path(project_id, dataset_id, table_id)
+ write_stream = types.WriteStream()
+
+ # When creating the stream, choose the type. Use the PENDING type to wait
+ # until the stream is committed before it is visible. See:
+ # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#google.cloud.bigquery.storage.v1.WriteStream.Type
+ write_stream.type_ = types.WriteStream.Type.PENDING
+ write_stream = write_client.create_write_stream(
+ parent=parent, write_stream=write_stream
+ )
+ stream_name = write_stream.name
+
+ # Create a template with fields needed for the first request.
+ request_template = types.AppendRowsRequest()
+
+ # The initial request must contain the stream name.
+ request_template.write_stream = stream_name
+
+ # So that BigQuery knows how to parse the serialized_rows, generate a
+ # protocol buffer representation of your message descriptor.
+ proto_schema = types.ProtoSchema()
+ proto_descriptor = descriptor_pb2.DescriptorProto()
+ customer_record_pb2.CustomerRecord.DESCRIPTOR.CopyToProto(proto_descriptor)
+ proto_schema.proto_descriptor = proto_descriptor
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.writer_schema = proto_schema
+ request_template.proto_rows = proto_data
+
+ # Some stream types support an unbounded number of requests. Construct an
+ # AppendRowsStream to send an arbitrary number of requests to a stream.
+ append_rows_stream = writer.AppendRowsStream(write_client, request_template)
+
+ # Create a batch of row data by appending proto2 serialized bytes to the
+ # serialized_rows repeated field.
+ proto_rows = types.ProtoRows()
+ proto_rows.serialized_rows.append(create_row_data(1, "Alice"))
+ proto_rows.serialized_rows.append(create_row_data(2, "Bob"))
+
+ # Set an offset to allow resuming this stream if the connection breaks.
+ # Keep track of which requests the server has acknowledged and resume the
+ # stream at the first non-acknowledged message. If the server has already
+ # processed a message with that offset, it will return an ALREADY_EXISTS
+ # error, which can be safely ignored.
+ #
+ # The first request must always have an offset of 0.
+ request = types.AppendRowsRequest()
+ request.offset = 0
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.rows = proto_rows
+ request.proto_rows = proto_data
+
+ response_future_1 = append_rows_stream.send(request)
+
+ # Send another batch.
+ proto_rows = types.ProtoRows()
+ proto_rows.serialized_rows.append(create_row_data(3, "Charles"))
+
+ # Since this is the second request, you only need to include the row data.
+ # The name of the stream and protocol buffers DESCRIPTOR is only needed in
+ # the first request.
+ request = types.AppendRowsRequest()
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.rows = proto_rows
+ request.proto_rows = proto_data
+
+ # Offset must equal the number of rows that were previously sent.
+ request.offset = 2
+
+ response_future_2 = append_rows_stream.send(request)
+
+ print(response_future_1.result())
+ print(response_future_2.result())
+
+ # Shutdown background threads and close the streaming connection.
+ append_rows_stream.close()
+
+ # A PENDING type stream must be "finalized" before being committed. No new
+ # records can be written to the stream after this method has been called.
+ write_client.finalize_write_stream(name=write_stream.name)
+
+ # Commit the stream you created earlier.
+ batch_commit_write_streams_request = types.BatchCommitWriteStreamsRequest()
+ batch_commit_write_streams_request.parent = parent
+ batch_commit_write_streams_request.write_streams = [write_stream.name]
+ write_client.batch_commit_write_streams(batch_commit_write_streams_request)
+
+ print(f"Writes to stream: '{write_stream.name}' have been committed.")
+
+
+# [END bigquerystorage_append_rows_pending]
diff --git a/bigquery_storage/snippets/append_rows_pending_test.py b/bigquery_storage/snippets/append_rows_pending_test.py
new file mode 100644
index 00000000000..791e9609779
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_pending_test.py
@@ -0,0 +1,72 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import random
+
+from google.cloud import bigquery
+import pytest
+
+from . import append_rows_pending
+
+DIR = pathlib.Path(__file__).parent
+
+
+regions = ["US", "non-US"]
+
+
+@pytest.fixture(params=regions)
+def sample_data_table(
+ request: pytest.FixtureRequest,
+ bigquery_client: bigquery.Client,
+ project_id: str,
+ dataset_id: str,
+ dataset_id_non_us: str,
+) -> str:
+ dataset = dataset_id
+ if request.param != "US":
+ dataset = dataset_id_non_us
+ schema = bigquery_client.schema_from_json(str(DIR / "customer_record_schema.json"))
+ table_id = f"append_rows_proto2_{random.randrange(10000)}"
+ full_table_id = f"{project_id}.{dataset}.{table_id}"
+ table = bigquery.Table(full_table_id, schema=schema)
+ table = bigquery_client.create_table(table, exists_ok=True)
+ yield full_table_id
+ bigquery_client.delete_table(table, not_found_ok=True)
+
+
+def test_append_rows_pending(
+ capsys: pytest.CaptureFixture,
+ bigquery_client: bigquery.Client,
+ sample_data_table: str,
+) -> None:
+ project_id, dataset_id, table_id = sample_data_table.split(".")
+ append_rows_pending.append_rows_pending(
+ project_id=project_id, dataset_id=dataset_id, table_id=table_id
+ )
+ out, _ = capsys.readouterr()
+ assert "have been committed" in out
+
+ rows = bigquery_client.query(
+ f"SELECT * FROM `{project_id}.{dataset_id}.{table_id}`"
+ ).result()
+ row_items = [
+ # Convert to sorted tuple of items to more easily search for expected rows.
+ tuple(sorted(row.items()))
+ for row in rows
+ ]
+
+ assert (("customer_name", "Alice"), ("row_num", 1)) in row_items
+ assert (("customer_name", "Bob"), ("row_num", 2)) in row_items
+ assert (("customer_name", "Charles"), ("row_num", 3)) in row_items
diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py
new file mode 100644
index 00000000000..d610b31faa2
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_proto2.py
@@ -0,0 +1,256 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START bigquerystorage_append_rows_raw_proto2]
+"""
+This code sample demonstrates using the low-level generated client for Python.
+"""
+
+import datetime
+import decimal
+
+from google.cloud import bigquery_storage_v1
+from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2
+
+# If you make updates to the sample_data.proto protocol buffers definition,
+# run:
+#
+# protoc --python_out=. sample_data.proto
+#
+# from the samples/snippets directory to generate the sample_data_pb2 module.
+from . import sample_data_pb2
+
+
+def append_rows_proto2(project_id: str, dataset_id: str, table_id: str) -> None:
+ """Create a write stream, write some sample data, and commit the stream."""
+ write_client = bigquery_storage_v1.BigQueryWriteClient()
+ parent = write_client.table_path(project_id, dataset_id, table_id)
+ write_stream = types.WriteStream()
+
+ # When creating the stream, choose the type. Use the PENDING type to wait
+ # until the stream is committed before it is visible. See:
+ # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#google.cloud.bigquery.storage.v1.WriteStream.Type
+ write_stream.type_ = types.WriteStream.Type.PENDING
+ write_stream = write_client.create_write_stream(
+ parent=parent, write_stream=write_stream
+ )
+ stream_name = write_stream.name
+
+ # Create a template with fields needed for the first request.
+ request_template = types.AppendRowsRequest()
+
+ # The initial request must contain the stream name.
+ request_template.write_stream = stream_name
+
+ # So that BigQuery knows how to parse the serialized_rows, generate a
+ # protocol buffer representation of your message descriptor.
+ proto_schema = types.ProtoSchema()
+ proto_descriptor = descriptor_pb2.DescriptorProto()
+ sample_data_pb2.SampleData.DESCRIPTOR.CopyToProto(proto_descriptor)
+ proto_schema.proto_descriptor = proto_descriptor
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.writer_schema = proto_schema
+ request_template.proto_rows = proto_data
+
+ # Some stream types support an unbounded number of requests. Construct an
+ # AppendRowsStream to send an arbitrary number of requests to a stream.
+ append_rows_stream = writer.AppendRowsStream(write_client, request_template)
+
+ # Create a batch of row data by appending proto2 serialized bytes to the
+ # serialized_rows repeated field.
+ proto_rows = types.ProtoRows()
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 1
+ row.bool_col = True
+ row.bytes_col = b"Hello, World!"
+ row.float64_col = float("+inf")
+ row.int64_col = 123
+ row.string_col = "Howdy!"
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 2
+ row.bool_col = False
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 3
+ row.bytes_col = b"See you later!"
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 4
+ row.float64_col = 1000000.125
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 5
+ row.int64_col = 67000
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 6
+ row.string_col = "Auf Wiedersehen!"
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ # Set an offset to allow resuming this stream if the connection breaks.
+ # Keep track of which requests the server has acknowledged and resume the
+ # stream at the first non-acknowledged message. If the server has already
+ # processed a message with that offset, it will return an ALREADY_EXISTS
+ # error, which can be safely ignored.
+ #
+ # The first request must always have an offset of 0.
+ request = types.AppendRowsRequest()
+ request.offset = 0
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.rows = proto_rows
+ request.proto_rows = proto_data
+
+ response_future_1 = append_rows_stream.send(request)
+
+ # Create a batch of rows containing scalar values that don't directly
+ # correspond to a protocol buffers scalar type. See the documentation for
+ # the expected data formats:
+ # https://cloud.google.com/bigquery/docs/write-api#data_type_conversions
+ proto_rows = types.ProtoRows()
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 7
+ date_value = datetime.date(2021, 8, 12)
+ epoch_value = datetime.date(1970, 1, 1)
+ delta = date_value - epoch_value
+ row.date_col = delta.days
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 8
+ datetime_value = datetime.datetime(2021, 8, 12, 9, 46, 23, 987456)
+ row.datetime_col = datetime_value.strftime("%Y-%m-%d %H:%M:%S.%f")
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 9
+ row.geography_col = "POINT(-122.347222 47.651111)"
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 10
+ numeric_value = decimal.Decimal("1.23456789101112e+6")
+ row.numeric_col = str(numeric_value)
+ bignumeric_value = decimal.Decimal("-1.234567891011121314151617181920e+16")
+ row.bignumeric_col = str(bignumeric_value)
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 11
+ time_value = datetime.time(11, 7, 48, 123456)
+ row.time_col = time_value.strftime("%H:%M:%S.%f")
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 12
+ timestamp_value = datetime.datetime(
+ 2021, 8, 12, 16, 11, 22, 987654, tzinfo=datetime.timezone.utc
+ )
+ epoch_value = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+ delta = timestamp_value - epoch_value
+ row.timestamp_col = int(delta.total_seconds()) * 1000000 + int(delta.microseconds)
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ # Since this is the second request, you only need to include the row data.
+ # The name of the stream and protocol buffers DESCRIPTOR is only needed in
+ # the first request.
+ request = types.AppendRowsRequest()
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.rows = proto_rows
+ request.proto_rows = proto_data
+
+ # Offset must equal the number of rows that were previously sent.
+ request.offset = 6
+
+ response_future_2 = append_rows_stream.send(request)
+
+ # Create a batch of rows with STRUCT and ARRAY BigQuery data types. In
+ # protocol buffers, these correspond to nested messages and repeated
+ # fields, respectively.
+ proto_rows = types.ProtoRows()
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 13
+ row.int64_list.append(1)
+ row.int64_list.append(2)
+ row.int64_list.append(3)
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 14
+ row.struct_col.sub_int_col = 7
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 15
+ sub_message = sample_data_pb2.SampleData.SampleStruct()
+ sub_message.sub_int_col = -1
+ row.struct_list.append(sub_message)
+ sub_message = sample_data_pb2.SampleData.SampleStruct()
+ sub_message.sub_int_col = -2
+ row.struct_list.append(sub_message)
+ sub_message = sample_data_pb2.SampleData.SampleStruct()
+ sub_message.sub_int_col = -3
+ row.struct_list.append(sub_message)
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ row = sample_data_pb2.SampleData()
+ row.row_num = 16
+ date_value = datetime.date(2021, 8, 8)
+ epoch_value = datetime.date(1970, 1, 1)
+ delta = date_value - epoch_value
+ row.range_date.start = delta.days
+ proto_rows.serialized_rows.append(row.SerializeToString())
+
+ request = types.AppendRowsRequest()
+ request.offset = 12
+ proto_data = types.AppendRowsRequest.ProtoData()
+ proto_data.rows = proto_rows
+ request.proto_rows = proto_data
+
+ # For each request sent, a message is expected in the responses iterable.
+ # This sample sends 3 requests, therefore expect exactly 3 responses.
+ response_future_3 = append_rows_stream.send(request)
+
+ # All three requests are in-flight, wait for them to finish being processed
+ # before finalizing the stream.
+ print(response_future_1.result())
+ print(response_future_2.result())
+ print(response_future_3.result())
+
+ # Shutdown background threads and close the streaming connection.
+ append_rows_stream.close()
+
+ # A PENDING type stream must be "finalized" before being committed. No new
+ # records can be written to the stream after this method has been called.
+ write_client.finalize_write_stream(name=write_stream.name)
+
+ # Commit the stream you created earlier.
+ batch_commit_write_streams_request = types.BatchCommitWriteStreamsRequest()
+ batch_commit_write_streams_request.parent = parent
+ batch_commit_write_streams_request.write_streams = [write_stream.name]
+ write_client.batch_commit_write_streams(batch_commit_write_streams_request)
+
+ print(f"Writes to stream: '{write_stream.name}' have been committed.")
+
+
+# [END bigquerystorage_append_rows_raw_proto2]
diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py
new file mode 100644
index 00000000000..15e5b9d9105
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_proto2_test.py
@@ -0,0 +1,128 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import decimal
+import pathlib
+import random
+
+from google.cloud import bigquery
+import pytest
+
+from . import append_rows_proto2
+
+DIR = pathlib.Path(__file__).parent
+
+
+regions = ["US", "non-US"]
+
+
+@pytest.fixture(params=regions)
+def sample_data_table(
+ request: pytest.FixtureRequest,
+ bigquery_client: bigquery.Client,
+ project_id: str,
+ dataset_id: str,
+ dataset_id_non_us: str,
+) -> str:
+ dataset = dataset_id
+ if request.param != "US":
+ dataset = dataset_id_non_us
+ schema = bigquery_client.schema_from_json(str(DIR / "sample_data_schema.json"))
+ table_id = f"append_rows_proto2_{random.randrange(10000)}"
+ full_table_id = f"{project_id}.{dataset}.{table_id}"
+ table = bigquery.Table(full_table_id, schema=schema)
+ table = bigquery_client.create_table(table, exists_ok=True)
+ yield full_table_id
+ bigquery_client.delete_table(table, not_found_ok=True)
+
+
+def test_append_rows_proto2(
+ capsys: pytest.CaptureFixture,
+ bigquery_client: bigquery.Client,
+ sample_data_table: str,
+) -> None:
+ project_id, dataset_id, table_id = sample_data_table.split(".")
+ append_rows_proto2.append_rows_proto2(
+ project_id=project_id, dataset_id=dataset_id, table_id=table_id
+ )
+ out, _ = capsys.readouterr()
+ assert "have been committed" in out
+
+ rows = bigquery_client.query(
+ f"SELECT * FROM `{project_id}.{dataset_id}.{table_id}`"
+ ).result()
+ row_items = [
+ # Convert to sorted tuple of items, omitting NULL values, to make
+ # searching for expected rows easier.
+ tuple(
+ sorted(
+ item for item in row.items() if item[1] is not None and item[1] != []
+ )
+ )
+ for row in rows
+ ]
+
+ assert (
+ ("bool_col", True),
+ ("bytes_col", b"Hello, World!"),
+ ("float64_col", float("+inf")),
+ ("int64_col", 123),
+ ("row_num", 1),
+ ("string_col", "Howdy!"),
+ ) in row_items
+ assert (("bool_col", False), ("row_num", 2)) in row_items
+ assert (("bytes_col", b"See you later!"), ("row_num", 3)) in row_items
+ assert (("float64_col", 1000000.125), ("row_num", 4)) in row_items
+ assert (("int64_col", 67000), ("row_num", 5)) in row_items
+ assert (("row_num", 6), ("string_col", "Auf Wiedersehen!")) in row_items
+ assert (("date_col", datetime.date(2021, 8, 12)), ("row_num", 7)) in row_items
+ assert (
+ ("datetime_col", datetime.datetime(2021, 8, 12, 9, 46, 23, 987456)),
+ ("row_num", 8),
+ ) in row_items
+ assert (
+ ("geography_col", "POINT(-122.347222 47.651111)"),
+ ("row_num", 9),
+ ) in row_items
+ assert (
+ ("bignumeric_col", decimal.Decimal("-1.234567891011121314151617181920e+16")),
+ ("numeric_col", decimal.Decimal("1.23456789101112e+6")),
+ ("row_num", 10),
+ ) in row_items
+ assert (
+ ("row_num", 11),
+ ("time_col", datetime.time(11, 7, 48, 123456)),
+ ) in row_items
+ assert (
+ ("row_num", 12),
+ (
+ "timestamp_col",
+ datetime.datetime(
+ 2021, 8, 12, 16, 11, 22, 987654, tzinfo=datetime.timezone.utc
+ ),
+ ),
+ ) in row_items
+ assert (("int64_list", [1, 2, 3]), ("row_num", 13)) in row_items
+ assert (
+ ("row_num", 14),
+ ("struct_col", {"sub_int_col": 7}),
+ ) in row_items
+ assert (
+ ("row_num", 15),
+ (
+ "struct_list",
+ [{"sub_int_col": -1}, {"sub_int_col": -2}, {"sub_int_col": -3}],
+ ),
+ ) in row_items
diff --git a/bigquery_storage/snippets/conftest.py b/bigquery_storage/snippets/conftest.py
new file mode 100644
index 00000000000..5f1e958183c
--- /dev/null
+++ b/bigquery_storage/snippets/conftest.py
@@ -0,0 +1,65 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Generator
+
+from google.cloud import bigquery
+import pytest
+import test_utils.prefixer
+
+prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets")
+
+
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_datasets(bigquery_client: bigquery.Client) -> None:
+ for dataset in bigquery_client.list_datasets():
+ if prefixer.should_cleanup(dataset.dataset_id):
+ bigquery_client.delete_dataset(
+ dataset, delete_contents=True, not_found_ok=True
+ )
+
+
+@pytest.fixture(scope="session")
+def bigquery_client() -> bigquery.Client:
+ return bigquery.Client()
+
+
+@pytest.fixture(scope="session")
+def project_id(bigquery_client: bigquery.Client) -> str:
+ return bigquery_client.project
+
+
+@pytest.fixture(scope="session")
+def dataset_id(
+ bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
+ dataset_id = prefixer.create_prefix()
+ full_dataset_id = f"{project_id}.{dataset_id}"
+ dataset = bigquery.Dataset(full_dataset_id)
+ bigquery_client.create_dataset(dataset)
+ yield dataset_id
+ bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
+
+
+@pytest.fixture(scope="session")
+def dataset_id_non_us(
+ bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
+ dataset_id = prefixer.create_prefix()
+ full_dataset_id = f"{project_id}.{dataset_id}"
+ dataset = bigquery.Dataset(full_dataset_id)
+ dataset.location = "asia-northeast1"
+ bigquery_client.create_dataset(dataset)
+ yield dataset_id
+ bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
diff --git a/bigquery_storage/snippets/customer_record.proto b/bigquery_storage/snippets/customer_record.proto
new file mode 100644
index 00000000000..6c79336b6fa
--- /dev/null
+++ b/bigquery_storage/snippets/customer_record.proto
@@ -0,0 +1,30 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START bigquerystorage_append_rows_pending_customer_record]
+// The BigQuery Storage API expects protocol buffer data to be encoded in the
+// proto2 wire format. This allows it to disambiguate missing optional fields
+// from default values without the need for wrapper types.
+syntax = "proto2";
+
+// Define a message type representing the rows in your table. The message
+// cannot contain fields which are not present in the table.
+message CustomerRecord {
+
+ optional string customer_name = 1;
+
+ // Use the required keyword for client-side validation of required fields.
+ required int64 row_num = 2;
+}
+// [END bigquerystorage_append_rows_pending_customer_record]
diff --git a/bigquery_storage/snippets/customer_record_pb2.py b/bigquery_storage/snippets/customer_record_pb2.py
new file mode 100644
index 00000000000..457ead954d8
--- /dev/null
+++ b/bigquery_storage/snippets/customer_record_pb2.py
@@ -0,0 +1,51 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: customer_record.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03'
+)
+
+
+_CUSTOMERRECORD = DESCRIPTOR.message_types_by_name["CustomerRecord"]
+CustomerRecord = _reflection.GeneratedProtocolMessageType(
+ "CustomerRecord",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CUSTOMERRECORD,
+ "__module__": "customer_record_pb2"
+ # @@protoc_insertion_point(class_scope:CustomerRecord)
+ },
+)
+_sym_db.RegisterMessage(CustomerRecord)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+ _CUSTOMERRECORD._serialized_start = 25
+ _CUSTOMERRECORD._serialized_end = 81
+# @@protoc_insertion_point(module_scope)
diff --git a/bigquery_storage/snippets/customer_record_schema.json b/bigquery_storage/snippets/customer_record_schema.json
new file mode 100644
index 00000000000..e04b31a7ead
--- /dev/null
+++ b/bigquery_storage/snippets/customer_record_schema.json
@@ -0,0 +1,11 @@
+[
+ {
+ "name": "customer_name",
+ "type": "STRING"
+ },
+ {
+ "name": "row_num",
+ "type": "INTEGER",
+ "mode": "REQUIRED"
+ }
+]
diff --git a/appengine/flexible_python37_and_earlier/hello_world/noxfile_config.py b/bigquery_storage/snippets/noxfile_config.py
similarity index 83%
rename from appengine/flexible_python37_and_earlier/hello_world/noxfile_config.py
rename to bigquery_storage/snippets/noxfile_config.py
index 1665dd736f8..f1fa9e5618b 100644
--- a/appengine/flexible_python37_and_earlier/hello_world/noxfile_config.py
+++ b/bigquery_storage/snippets/noxfile_config.py
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,17 +22,20 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- "enforce_type_hints": False,
+ "enforce_type_hints": True,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {},
diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt
new file mode 100644
index 00000000000..230ca56dc3a
--- /dev/null
+++ b/bigquery_storage/snippets/requirements-test.txt
@@ -0,0 +1,4 @@
+google-cloud-testutils==1.6.4
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.1; python_version >= '3.9'
diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt
new file mode 100644
index 00000000000..8a456493526
--- /dev/null
+++ b/bigquery_storage/snippets/requirements.txt
@@ -0,0 +1,6 @@
+google-cloud-bigquery-storage==2.32.0
+google-cloud-bigquery===3.30.0; python_version <= '3.8'
+google-cloud-bigquery==3.35.1; python_version >= '3.9'
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.1; python_version >= '3.9'
diff --git a/bigquery_storage/snippets/sample_data.proto b/bigquery_storage/snippets/sample_data.proto
new file mode 100644
index 00000000000..6f0bb93a65c
--- /dev/null
+++ b/bigquery_storage/snippets/sample_data.proto
@@ -0,0 +1,70 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START bigquerystorage_append_rows_raw_proto2_definition]
+// The BigQuery Storage API expects protocol buffer data to be encoded in the
+// proto2 wire format. This allows it to disambiguate missing optional fields
+// from default values without the need for wrapper types.
+syntax = "proto2";
+
+// Define a message type representing the rows in your table. The message
+// cannot contain fields which are not present in the table.
+message SampleData {
+ // Use a nested message to encode STRUCT column values.
+ //
+ // References to external messages are not allowed. Any message definitions
+ // must be nested within the root message representing row data.
+ message SampleStruct {
+ optional int64 sub_int_col = 1;
+ }
+
+ message RangeValue {
+ optional int32 start = 1;
+ optional int32 end = 2;
+ }
+
+ // The following types map directly between protocol buffers and their
+ // corresponding BigQuery data types.
+ optional bool bool_col = 1;
+ optional bytes bytes_col = 2;
+ optional double float64_col = 3;
+ optional int64 int64_col = 4;
+ optional string string_col = 5;
+
+ // The following data types require some encoding to use. See the
+ // documentation for the expected data formats:
+ // https://cloud.google.com/bigquery/docs/write-api#data_type_conversion
+ optional int32 date_col = 6;
+ optional string datetime_col = 7;
+ optional string geography_col = 8;
+ optional string numeric_col = 9;
+ optional string bignumeric_col = 10;
+ optional string time_col = 11;
+ optional int64 timestamp_col = 12;
+
+ // Use a repeated field to represent a BigQuery ARRAY value.
+ repeated int64 int64_list = 13;
+
+ // Use a nested message to encode STRUCT and ARRAY values.
+ optional SampleStruct struct_col = 14;
+ repeated SampleStruct struct_list = 15;
+
+ // Range types, see:
+ // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type
+ optional RangeValue range_date = 16;
+
+ // Use the required keyword for client-side validation of required fields.
+ required int64 row_num = 17;
+}
+// [END bigquerystorage_append_rows_raw_proto2_definition]
diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py
new file mode 100644
index 00000000000..54ef06d99fa
--- /dev/null
+++ b/bigquery_storage/snippets/sample_data_pb2.py
@@ -0,0 +1,43 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: sample_data.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x11sample_data.proto"\xff\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12*\n\nrange_date\x18\x10 \x01(\x0b\x32\x16.SampleData.RangeValue\x12\x0f\n\x07row_num\x18\x11 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03\x1a(\n\nRangeValue\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05'
+)
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "sample_data_pb2", globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+ _SAMPLEDATA._serialized_start = 22
+ _SAMPLEDATA._serialized_end = 533
+ _SAMPLEDATA_SAMPLESTRUCT._serialized_start = 456
+ _SAMPLEDATA_SAMPLESTRUCT._serialized_end = 491
+ _SAMPLEDATA_RANGEVALUE._serialized_start = 493
+ _SAMPLEDATA_RANGEVALUE._serialized_end = 533
+# @@protoc_insertion_point(module_scope)
diff --git a/bigquery_storage/snippets/sample_data_schema.json b/bigquery_storage/snippets/sample_data_schema.json
new file mode 100644
index 00000000000..40efb7122b5
--- /dev/null
+++ b/bigquery_storage/snippets/sample_data_schema.json
@@ -0,0 +1,81 @@
+
+[
+ {
+ "name": "bool_col",
+ "type": "BOOLEAN"
+ },
+ {
+ "name": "bytes_col",
+ "type": "BYTES"
+ },
+ {
+ "name": "date_col",
+ "type": "DATE"
+ },
+ {
+ "name": "datetime_col",
+ "type": "DATETIME"
+ },
+ {
+ "name": "float64_col",
+ "type": "FLOAT"
+ },
+ {
+ "name": "geography_col",
+ "type": "GEOGRAPHY"
+ },
+ {
+ "name": "int64_col",
+ "type": "INTEGER"
+ },
+ {
+ "name": "numeric_col",
+ "type": "NUMERIC"
+ },
+ {
+ "name": "bignumeric_col",
+ "type": "BIGNUMERIC"
+ },
+ {
+ "name": "row_num",
+ "type": "INTEGER",
+ "mode": "REQUIRED"
+ },
+ {
+ "name": "string_col",
+ "type": "STRING"
+ },
+ {
+ "name": "time_col",
+ "type": "TIME"
+ },
+ {
+ "name": "timestamp_col",
+ "type": "TIMESTAMP"
+ },
+ {
+ "name": "int64_list",
+ "type": "INTEGER",
+ "mode": "REPEATED"
+ },
+ {
+ "name": "struct_col",
+ "type": "RECORD",
+ "fields": [
+ {"name": "sub_int_col", "type": "INTEGER"}
+ ]
+ },
+ {
+ "name": "struct_list",
+ "type": "RECORD",
+ "fields": [
+ {"name": "sub_int_col", "type": "INTEGER"}
+ ],
+ "mode": "REPEATED"
+ },
+ {
+ "name": "range_date",
+ "type": "RANGE",
+ "rangeElementType": {"type": "DATE"}
+ }
+ ]
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/helloworld/__init__.py b/bigquery_storage/to_dataframe/__init__.py
similarity index 100%
rename from appengine/flexible_python37_and_earlier/hello_world_django/helloworld/__init__.py
rename to bigquery_storage/to_dataframe/__init__.py
diff --git a/bigquery_storage/to_dataframe/jupyter_test.py b/bigquery_storage/to_dataframe/jupyter_test.py
new file mode 100644
index 00000000000..c2046b8c80e
--- /dev/null
+++ b/bigquery_storage/to_dataframe/jupyter_test.py
@@ -0,0 +1,67 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import IPython
+from IPython.terminal import interactiveshell
+from IPython.testing import tools
+import pytest
+
+# Ignore semicolon lint warning because semicolons are used in notebooks
+# flake8: noqa E703
+
+
+@pytest.fixture(scope="session")
+def ipython():
+ config = tools.default_config()
+ config.TerminalInteractiveShell.simple_prompt = True
+ shell = interactiveshell.TerminalInteractiveShell.instance(config=config)
+ return shell
+
+
+@pytest.fixture()
+def ipython_interactive(request, ipython):
+ """Activate IPython's builtin hooks
+
+ for the duration of the test scope.
+ """
+ with ipython.builtin_trap:
+ yield ipython
+
+
+def _strip_region_tags(sample_text):
+ """Remove blank lines and region tags from sample text"""
+ magic_lines = [
+ line for line in sample_text.split("\n") if len(line) > 0 and "# [" not in line
+ ]
+ return "\n".join(magic_lines)
+
+
+def test_jupyter_tutorial(ipython):
+ ip = IPython.get_ipython()
+ ip.extension_manager.load_extension("google.cloud.bigquery")
+
+ # This code sample intentionally queries a lot of data to demonstrate the
+ # speed-up of using the BigQuery Storage API to download the results.
+ sample = """
+ # [START bigquerystorage_jupyter_tutorial_query_default]
+ %%bigquery tax_forms
+ SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012`
+ # [END bigquerystorage_jupyter_tutorial_query_default]
+ """
+ result = ip.run_cell(_strip_region_tags(sample))
+ result.raise_error() # Throws an exception if the cell failed.
+
+ assert "tax_forms" in ip.user_ns # verify that variable exists
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/noxfile_config.py b/bigquery_storage/to_dataframe/noxfile_config.py
similarity index 83%
rename from appengine/flexible_python37_and_earlier/hello_world_django/noxfile_config.py
rename to bigquery_storage/to_dataframe/noxfile_config.py
index 1665dd736f8..f1fa9e5618b 100644
--- a/appengine/flexible_python37_and_earlier/hello_world_django/noxfile_config.py
+++ b/bigquery_storage/to_dataframe/noxfile_config.py
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,17 +22,20 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- # Skipping for Python 3.9 due to pyarrow compilation failure.
- "ignored_versions": ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- "enforce_type_hints": False,
+ "enforce_type_hints": True,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {},
diff --git a/bigquery_storage/to_dataframe/read_query_results.py b/bigquery_storage/to_dataframe/read_query_results.py
new file mode 100644
index 00000000000..e947e8afe93
--- /dev/null
+++ b/bigquery_storage/to_dataframe/read_query_results.py
@@ -0,0 +1,49 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pandas
+
+
+def read_query_results() -> pandas.DataFrame:
+ # [START bigquerystorage_pandas_tutorial_read_query_results]
+ from google.cloud import bigquery
+
+ bqclient = bigquery.Client()
+
+ # Download query results.
+ query_string = """
+ SELECT
+ CONCAT(
+ 'https://stackoverflow.com/questions/',
+ CAST(id as STRING)) as url,
+ view_count
+ FROM `bigquery-public-data.stackoverflow.posts_questions`
+ WHERE tags like '%google-bigquery%'
+ ORDER BY view_count DESC
+ """
+
+ dataframe = (
+ bqclient.query(query_string)
+ .result()
+ .to_dataframe(
+ # Optionally, explicitly request to use the BigQuery Storage API. As of
+ # google-cloud-bigquery version 1.26.0 and above, the BigQuery Storage
+ # API is used by default.
+ create_bqstorage_client=True,
+ )
+ )
+ print(dataframe.head())
+ # [END bigquerystorage_pandas_tutorial_read_query_results]
+
+ return dataframe
diff --git a/appengine/flexible_python37_and_earlier/hello_world_django/helloworld/views.py b/bigquery_storage/to_dataframe/read_query_results_test.py
similarity index 68%
rename from appengine/flexible_python37_and_earlier/hello_world_django/helloworld/views.py
rename to bigquery_storage/to_dataframe/read_query_results_test.py
index 71c0106bda1..b5cb5517401 100644
--- a/appengine/flexible_python37_and_earlier/hello_world_django/helloworld/views.py
+++ b/bigquery_storage/to_dataframe/read_query_results_test.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2015 Google LLC.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,9 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
-from django.http import HttpResponse
+from . import read_query_results
-def index(request):
- return HttpResponse("Hello, World. This is Django running on Google App Engine")
+def test_read_query_results(capsys: pytest.CaptureFixture) -> None:
+ read_query_results.read_query_results()
+ out, _ = capsys.readouterr()
+ assert "stackoverflow" in out
diff --git a/bigquery_storage/to_dataframe/read_table_bigquery.py b/bigquery_storage/to_dataframe/read_table_bigquery.py
new file mode 100644
index 00000000000..7a69a64d77d
--- /dev/null
+++ b/bigquery_storage/to_dataframe/read_table_bigquery.py
@@ -0,0 +1,45 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pandas
+
+
+def read_table() -> pandas.DataFrame:
+ # [START bigquerystorage_pandas_tutorial_read_table]
+ from google.cloud import bigquery
+
+ bqclient = bigquery.Client()
+
+ # Download a table.
+ table = bigquery.TableReference.from_string(
+ "bigquery-public-data.utility_us.country_code_iso"
+ )
+ rows = bqclient.list_rows(
+ table,
+ selected_fields=[
+ bigquery.SchemaField("country_name", "STRING"),
+ bigquery.SchemaField("fips_code", "STRING"),
+ ],
+ )
+ dataframe = rows.to_dataframe(
+ # Optionally, explicitly request to use the BigQuery Storage API. As of
+ # google-cloud-bigquery version 1.26.0 and above, the BigQuery Storage
+ # API is used by default.
+ create_bqstorage_client=True,
+ )
+ print(dataframe.head())
+ # [END bigquerystorage_pandas_tutorial_read_table]
+
+ return dataframe
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/wsgi.py b/bigquery_storage/to_dataframe/read_table_bigquery_test.py
similarity index 69%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/mysite/wsgi.py
rename to bigquery_storage/to_dataframe/read_table_bigquery_test.py
index 968cf994b60..5b45c4d5163 100644
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/mysite/wsgi.py
+++ b/bigquery_storage/to_dataframe/read_table_bigquery_test.py
@@ -1,4 +1,4 @@
-# Copyright 2015 Google LLC.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
-import os
+from . import read_table_bigquery
-from django.core.wsgi import get_wsgi_application
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
-
-application = get_wsgi_application()
+def test_read_table(capsys: pytest.CaptureFixture) -> None:
+ read_table_bigquery.read_table()
+ out, _ = capsys.readouterr()
+ assert "country_name" in out
diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage.py b/bigquery_storage/to_dataframe/read_table_bqstorage.py
new file mode 100644
index 00000000000..ce1cd3872ae
--- /dev/null
+++ b/bigquery_storage/to_dataframe/read_table_bqstorage.py
@@ -0,0 +1,74 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pandas as pd
+
+
+def read_table(your_project_id: str) -> pd.DataFrame:
+ original_your_project_id = your_project_id
+ # [START bigquerystorage_pandas_tutorial_read_session]
+ your_project_id = "project-for-read-session"
+ # [END bigquerystorage_pandas_tutorial_read_session]
+ your_project_id = original_your_project_id
+
+ # [START bigquerystorage_pandas_tutorial_read_session]
+ import pandas
+
+ from google.cloud import bigquery_storage
+ from google.cloud.bigquery_storage import types
+
+ bqstorageclient = bigquery_storage.BigQueryReadClient()
+
+ project_id = "bigquery-public-data"
+ dataset_id = "new_york_trees"
+ table_id = "tree_species"
+ table = f"projects/{project_id}/datasets/{dataset_id}/tables/{table_id}"
+
+ # Select columns to read with read options. If no read options are
+ # specified, the whole table is read.
+ read_options = types.ReadSession.TableReadOptions(
+ selected_fields=["species_common_name", "fall_color"]
+ )
+
+ parent = "projects/{}".format(your_project_id)
+
+ requested_session = types.ReadSession(
+ table=table,
+ # Avro is also supported, but the Arrow data format is optimized to
+ # work well with column-oriented data structures such as pandas
+ # DataFrames.
+ data_format=types.DataFormat.ARROW,
+ read_options=read_options,
+ )
+ read_session = bqstorageclient.create_read_session(
+ parent=parent,
+ read_session=requested_session,
+ max_stream_count=1,
+ )
+
+ # This example reads from only a single stream. Read from multiple streams
+ # to fetch data faster. Note that the session may not contain any streams
+ # if there are no rows to read.
+ stream = read_session.streams[0]
+ reader = bqstorageclient.read_rows(stream.name)
+
+ # Parse all Arrow blocks and create a dataframe.
+ frames = []
+ for message in reader.rows().pages:
+ frames.append(message.to_dataframe())
+ dataframe = pandas.concat(frames)
+ print(dataframe.head())
+ # [END bigquerystorage_pandas_tutorial_read_session]
+
+ return dataframe
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/manage.py b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py
old mode 100755
new mode 100644
similarity index 65%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/manage.py
rename to bigquery_storage/to_dataframe/read_table_bqstorage_test.py
index 89fb5ae5607..7b46a6b180a
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/manage.py
+++ b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2015 Google LLC.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,12 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-import sys
+import pytest
-if __name__ == "__main__":
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
+from . import read_table_bqstorage
- from django.core.management import execute_from_command_line
- execute_from_command_line(sys.argv)
+def test_read_table(capsys: pytest.CaptureFixture, project_id: str) -> None:
+ read_table_bqstorage.read_table(your_project_id=project_id)
+ out, _ = capsys.readouterr()
+ assert "species_common_name" in out
diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt
new file mode 100644
index 00000000000..7561ed55ce2
--- /dev/null
+++ b/bigquery_storage/to_dataframe/requirements-test.txt
@@ -0,0 +1,3 @@
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.1; python_version >= '3.9'
diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
new file mode 100644
index 00000000000..e3b75fdaf5f
--- /dev/null
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -0,0 +1,19 @@
+google-auth==2.40.3
+google-cloud-bigquery-storage==2.32.0
+google-cloud-bigquery===3.30.0; python_version <= '3.8'
+google-cloud-bigquery==3.35.1; python_version >= '3.9'
+pyarrow===12.0.1; python_version == '3.7'
+pyarrow===17.0.0; python_version == '3.8'
+pyarrow==21.0.0; python_version >= '3.9'
+ipython===7.31.1; python_version == '3.7'
+ipython===8.10.0; python_version == '3.8'
+ipython===8.18.1; python_version == '3.9'
+ipython===8.33.0; python_version == '3.10'
+ipython==9.4.0; python_version >= '3.11'
+ipywidgets==8.1.7
+pandas===1.3.5; python_version == '3.7'
+pandas===2.0.3; python_version == '3.8'
+pandas==2.3.1; python_version >= '3.9'
+tqdm==4.67.1
+db-dtypes===1.4.2; python_version <= '3.8'
+db-dtypes==1.4.3; python_version >= '3.9'
diff --git a/cloud-media-livestream/keypublisher/requirements.txt b/cloud-media-livestream/keypublisher/requirements.txt
index 7d169d94c66..f56357f0f87 100644
--- a/cloud-media-livestream/keypublisher/requirements.txt
+++ b/cloud-media-livestream/keypublisher/requirements.txt
@@ -1,5 +1,5 @@
Flask==2.2.5
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-secret-manager==2.21.1
lxml==5.2.1
pycryptodome==3.21.0
diff --git a/cloud-sql/mysql/sqlalchemy/requirements.txt b/cloud-sql/mysql/sqlalchemy/requirements.txt
index 5335c1fb512..a5e6f819085 100644
--- a/cloud-sql/mysql/sqlalchemy/requirements.txt
+++ b/cloud-sql/mysql/sqlalchemy/requirements.txt
@@ -2,6 +2,6 @@ Flask==2.2.2
SQLAlchemy==2.0.40
PyMySQL==1.1.1
gunicorn==23.0.0
-cloud-sql-python-connector==1.18.1
-functions-framework==3.8.2
+cloud-sql-python-connector==1.20.0
+functions-framework==3.9.2
Werkzeug==2.3.8
diff --git a/cloud-sql/postgres/client-side-encryption/requirements.txt b/cloud-sql/postgres/client-side-encryption/requirements.txt
index 1749cee78fb..1ec3e93d497 100644
--- a/cloud-sql/postgres/client-side-encryption/requirements.txt
+++ b/cloud-sql/postgres/client-side-encryption/requirements.txt
@@ -1,3 +1,3 @@
SQLAlchemy==2.0.40
-pg8000==1.31.2
+pg8000==1.31.5
tink==1.9.0
diff --git a/cloud-sql/postgres/sqlalchemy/requirements.txt b/cloud-sql/postgres/sqlalchemy/requirements.txt
index ecf3b67d267..ba738cc1669 100644
--- a/cloud-sql/postgres/sqlalchemy/requirements.txt
+++ b/cloud-sql/postgres/sqlalchemy/requirements.txt
@@ -1,7 +1,7 @@
Flask==2.2.2
-pg8000==1.31.2
+pg8000==1.31.5
SQLAlchemy==2.0.40
-cloud-sql-python-connector==1.18.1
+cloud-sql-python-connector==1.20.0
gunicorn==23.0.0
-functions-framework==3.8.2
+functions-framework==3.9.2
Werkzeug==2.3.8
diff --git a/cloud-sql/sql-server/sqlalchemy/requirements.txt b/cloud-sql/sql-server/sqlalchemy/requirements.txt
index 99a0f2c595f..a2aae8784d1 100644
--- a/cloud-sql/sql-server/sqlalchemy/requirements.txt
+++ b/cloud-sql/sql-server/sqlalchemy/requirements.txt
@@ -3,7 +3,7 @@ gunicorn==23.0.0
python-tds==1.16.0
pyopenssl==25.0.0
SQLAlchemy==2.0.40
-cloud-sql-python-connector==1.18.1
+cloud-sql-python-connector==1.20.0
sqlalchemy-pytds==1.0.2
-functions-framework==3.8.2
+functions-framework==3.9.2
Werkzeug==2.3.8
diff --git a/cloud_scheduler/snippets/requirements.txt b/cloud_scheduler/snippets/requirements.txt
index e95a2ef8c50..af65635c6c9 100644
--- a/cloud_scheduler/snippets/requirements.txt
+++ b/cloud_scheduler/snippets/requirements.txt
@@ -1,4 +1,4 @@
Flask==3.0.3
gunicorn==23.0.0
google-cloud-scheduler==2.14.1
-Werkzeug==3.0.6
+Werkzeug==3.1.5
diff --git a/cloud_tasks/http_queues/delete_http_queue_test.py b/cloud_tasks/http_queues/delete_http_queue_test.py
index 3b802179ef2..33fd90129ee 100644
--- a/cloud_tasks/http_queues/delete_http_queue_test.py
+++ b/cloud_tasks/http_queues/delete_http_queue_test.py
@@ -59,7 +59,7 @@ def q():
try:
client.delete_queue(name=queue.name)
except Exception as e:
- if type(e) == NotFound: # It's still gone, anyway, so it's fine
+ if type(e) is NotFound: # It's still gone, anyway, so it's fine
pass
else:
print(f"Tried my best to clean up, but could not: {e}")
diff --git a/composer/2022_airflow_summit/data_analytics_process_expansion_test.py b/composer/2022_airflow_summit/data_analytics_process_expansion_test.py
index 466a546391d..ffd4b46b7c5 100644
--- a/composer/2022_airflow_summit/data_analytics_process_expansion_test.py
+++ b/composer/2022_airflow_summit/data_analytics_process_expansion_test.py
@@ -214,7 +214,7 @@ def bq_dataset(test_bucket):
print(f"Ignoring NotFound on cleanup, details: {e}")
-@backoff.on_exception(backoff.expo, AssertionError, max_tries=3)
+@backoff.on_exception(backoff.expo, AssertionError, max_tries=5)
def test_process(test_dataproc_batch):
print(test_dataproc_batch)
diff --git a/composer/airflow_1_samples/gke_operator.py b/composer/airflow_1_samples/gke_operator.py
index b3638655b20..082d3333f9a 100644
--- a/composer/airflow_1_samples/gke_operator.py
+++ b/composer/airflow_1_samples/gke_operator.py
@@ -92,7 +92,7 @@
# project-id as the gcr.io images and the service account that Composer
# uses has permission to access the Google Container Registry
# (the default service account has permission)
- image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
)
# [END composer_gkeoperator_minconfig_airflow_1]
diff --git a/composer/airflow_1_samples/kubernetes_pod_operator.py b/composer/airflow_1_samples/kubernetes_pod_operator.py
index 11abdb6b1ec..2799f467ec9 100644
--- a/composer/airflow_1_samples/kubernetes_pod_operator.py
+++ b/composer/airflow_1_samples/kubernetes_pod_operator.py
@@ -93,7 +93,7 @@
# project-id as the gcr.io images and the service account that Composer
# uses has permission to access the Google Container Registry
# (the default service account has permission)
- image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
)
# [END composer_kubernetespodoperator_minconfig_airflow_1]
# [START composer_kubernetespodoperator_templateconfig_airflow_1]
diff --git a/composer/airflow_1_samples/noxfile_config.py b/composer/airflow_1_samples/noxfile_config.py
index 7185f415100..21ea6aca21a 100644
--- a/composer/airflow_1_samples/noxfile_config.py
+++ b/composer/airflow_1_samples/noxfile_config.py
@@ -32,7 +32,7 @@
# You can opt out from the test for specific Python versions.
# Skipping for Python 3.9 due to numpy compilation failure.
# Skipping 3.6 and 3.7, they are more out of date
- "ignored_versions": ["2.7", "3.6", "3.7", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7", "3.6", "3.7", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
"enforce_type_hints": False,
diff --git a/composer/tools/composer_dags.py b/composer/tools/composer_dags.py
index f6967782fa4..a5306fa52d5 100644
--- a/composer/tools/composer_dags.py
+++ b/composer/tools/composer_dags.py
@@ -33,7 +33,7 @@ class DAG:
"""Provides necessary utils for Composer DAGs."""
COMPOSER_AF_VERSION_RE = re.compile(
- "composer-([0-9]+).([0-9]+).([0-9]+).*" "-airflow-([0-9]+).([0-9]+).([0-9]+).*"
+ r"composer-(\d+)(?:\.(\d+)\.(\d+))?.*?-airflow-(\d+)\.(\d+)\.(\d+)"
)
@staticmethod
diff --git a/composer/tools/composer_migrate.py b/composer/tools/composer_migrate.py
index ecbbb97dae8..c4ef2fbb5f9 100644
--- a/composer/tools/composer_migrate.py
+++ b/composer/tools/composer_migrate.py
@@ -108,7 +108,7 @@ def unpause_dag(
dag_id: str,
environment_name: str,
) -> Any:
- """Unpauses all DAGs in a Composer environment."""
+ """Unpauses a DAG in a Composer environment."""
command = (
f"CLOUDSDK_API_ENDPOINT_OVERRIDES_COMPOSER={self.sdk_endpoint} gcloud"
" composer environments run"
@@ -363,7 +363,7 @@ def main(
pprint.pformat(target_environment),
)
logger.warning(
- "Composer 3 environnment workloads config may be different from the"
+ "Composer 3 environment workloads config may be different from the"
" source environment."
)
logger.warning(
@@ -413,7 +413,7 @@ def main(
client.load_snapshot(target_environment_name, snapshot_path)
logger.info("Snapshot loaded.")
- # 6. Unpase DAGs in the new environment
+ # 6. Unpause DAGs in the new environment
logger.info("STEP 6: Unpausing DAGs in the new environment...")
all_dags_present = False
# Wait until all DAGs from source environment are visible.
diff --git a/composer/workflows/airflow_db_cleanup.py b/composer/workflows/airflow_db_cleanup.py
index d277d5ec378..45119168111 100644
--- a/composer/workflows/airflow_db_cleanup.py
+++ b/composer/workflows/airflow_db_cleanup.py
@@ -66,7 +66,7 @@
from airflow.version import version as airflow_version
import dateutil.parser
-from sqlalchemy import desc, sql, text
+from sqlalchemy import desc, text
from sqlalchemy.exc import ProgrammingError
@@ -360,12 +360,11 @@ def build_query(
logging.info("INITIAL QUERY : " + str(query))
- if dag_id:
+ if hasattr(airflow_db_model, 'dag_id'):
+ logging.info("Filtering by dag_id: " + str(dag_id))
query = query.filter(airflow_db_model.dag_id == dag_id)
if airflow_db_model == DagRun:
- # For DagRuns we want to leave last *scheduled* DagRun
- # regardless of its age
newest_dagrun = (
session
.query(airflow_db_model)
@@ -375,17 +374,16 @@ def build_query(
.first()
)
logging.info("Newest dagrun: " + str(newest_dagrun))
+
+ # For DagRuns we want to leave last *scheduled* DagRun
+ # regardless of its age, otherwise Airflow will retrigger it
if newest_dagrun is not None:
query = (
query
- .filter(age_check_column <= max_date)
.filter(airflow_db_model.id != newest_dagrun.id)
)
- else:
- query = query.filter(sql.false())
- else:
- query = query.filter(age_check_column <= max_date)
+ query = query.filter(age_check_column <= max_date)
logging.info("FINAL QUERY: " + str(query))
return query
diff --git a/composer/workflows/gke_operator.py b/composer/workflows/gke_operator.py
index 2f1eaa62c8a..31536ba55e7 100644
--- a/composer/workflows/gke_operator.py
+++ b/composer/workflows/gke_operator.py
@@ -29,7 +29,7 @@
with models.DAG(
"example_gcp_gke",
- schedule_interval=None, # Override to match your needs
+ schedule=None, # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
@@ -86,7 +86,7 @@
# project-id as the gcr.io images and the service account that Composer
# uses has permission to access the Google Container Registry
# (the default service account has permission)
- image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
)
# [END composer_gkeoperator_minconfig]
diff --git a/composer/workflows/kubernetes_pod_operator.py b/composer/workflows/kubernetes_pod_operator.py
index f679ead81d7..26dcb9d5173 100644
--- a/composer/workflows/kubernetes_pod_operator.py
+++ b/composer/workflows/kubernetes_pod_operator.py
@@ -96,7 +96,7 @@
# project-id as the gcr.io images and the service account that Composer
# uses has permission to access the Google Container Registry
# (the default service account has permission)
- image="gcr.io/gcp-runtimes/ubuntu_18_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
)
# [END composer_kubernetespodoperator_minconfig]
# [START composer_kubernetespodoperator_templateconfig]
diff --git a/composer/workflows/kubernetes_pod_operator_c2.py b/composer/workflows/kubernetes_pod_operator_c2.py
index 65e43289695..0a227058d77 100644
--- a/composer/workflows/kubernetes_pod_operator_c2.py
+++ b/composer/workflows/kubernetes_pod_operator_c2.py
@@ -17,10 +17,11 @@
import datetime
from airflow import models
-from airflow.kubernetes.secret import Secret
+
from airflow.providers.cncf.kubernetes.operators.pod import (
KubernetesPodOperator,
)
+from airflow.providers.cncf.kubernetes.secret import Secret
from kubernetes.client import models as k8s_models
# A Secret is an object that contains a small amount of sensitive data such as
@@ -60,7 +61,7 @@
# required to debug.
with models.DAG(
dag_id="composer_sample_kubernetes_pod",
- schedule_interval=datetime.timedelta(days=1),
+ schedule=datetime.timedelta(days=1),
start_date=YESTERDAY,
) as dag:
# Only name, image, and task_id are required to create a
@@ -88,7 +89,7 @@
# project-id as the gcr.io images and the service account that Composer
# uses has permission to access the Google Container Registry
# (the default service account has permission)
- image="gcr.io/gcp-runtimes/ubuntu_20_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
# Specifies path to kubernetes config. The config_file is templated.
config_file="/home/airflow/composer_kube_config",
# Identifier of connection that should be used
@@ -130,7 +131,7 @@
task_id="ex-kube-secrets",
name="ex-kube-secrets",
namespace="composer-user-workloads",
- image="gcr.io/gcp-runtimes/ubuntu_20_0_4",
+ image="marketplace.gcr.io/google/ubuntu2204",
startup_timeout_seconds=300,
# The secrets to pass to Pod, the Pod will fail to create if the
# secrets you specify in a Secret object do not exist in Kubernetes.
diff --git a/composer/workflows/noxfile_config.py b/composer/workflows/noxfile_config.py
index 7eeb5bb5817..1dbb9beffd2 100644
--- a/composer/workflows/noxfile_config.py
+++ b/composer/workflows/noxfile_config.py
@@ -39,6 +39,7 @@
"3.10",
"3.12",
"3.13",
+ "3.14",
],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
diff --git a/compute/managed-instances/demo/app.py b/compute/managed-instances/demo/app.py
index e7b49a81ed5..7195278eba2 100644
--- a/compute/managed-instances/demo/app.py
+++ b/compute/managed-instances/demo/app.py
@@ -50,7 +50,7 @@ def init():
@app.route("/")
def index():
"""Returns the demo UI."""
- global _cpu_burner, _is_healthy
+ global _cpu_burner, _is_healthy # noqa: F824
return render_template(
"index.html",
hostname=gethostname(),
@@ -68,7 +68,7 @@ def health():
Returns:
HTTP status 200 if 'healthy', HTTP status 500 if 'unhealthy'
"""
- global _is_healthy
+ global _is_healthy # noqa: F824
template = render_template("health.html", healthy=_is_healthy)
return make_response(template, 200 if _is_healthy else 500)
@@ -76,7 +76,7 @@ def health():
@app.route("/makeHealthy")
def make_healthy():
"""Sets the server to simulate a 'healthy' status."""
- global _cpu_burner, _is_healthy
+ global _cpu_burner, _is_healthy # noqa: F824
_is_healthy = True
template = render_template(
@@ -95,7 +95,7 @@ def make_healthy():
@app.route("/makeUnhealthy")
def make_unhealthy():
"""Sets the server to simulate an 'unhealthy' status."""
- global _cpu_burner, _is_healthy
+ global _cpu_burner, _is_healthy # noqa: F824
_is_healthy = False
template = render_template(
@@ -114,7 +114,7 @@ def make_unhealthy():
@app.route("/startLoad")
def start_load():
"""Sets the server to simulate high CPU load."""
- global _cpu_burner, _is_healthy
+ global _cpu_burner, _is_healthy # noqa: F824
_cpu_burner.start()
template = render_template(
@@ -133,7 +133,7 @@ def start_load():
@app.route("/stopLoad")
def stop_load():
"""Sets the server to stop simulating CPU load."""
- global _cpu_burner, _is_healthy
+ global _cpu_burner, _is_healthy # noqa: F824
_cpu_burner.stop()
template = render_template(
diff --git a/connectgateway/requirements.txt b/connectgateway/requirements.txt
index eea0fbe3ce7..531ee9e7eb4 100644
--- a/connectgateway/requirements.txt
+++ b/connectgateway/requirements.txt
@@ -1,4 +1,4 @@
-google-cloud-gke-connect-gateway==0.10.3
+google-cloud-gke-connect-gateway==0.10.4
google-auth==2.38.0
-kubernetes==33.1.0
+kubernetes==34.1.0
google-api-core==2.24.2
diff --git a/dataflow/flex-templates/pipeline_with_dependencies/requirements.txt b/dataflow/flex-templates/pipeline_with_dependencies/requirements.txt
index b971c1e9f7e..bef166bb943 100644
--- a/dataflow/flex-templates/pipeline_with_dependencies/requirements.txt
+++ b/dataflow/flex-templates/pipeline_with_dependencies/requirements.txt
@@ -218,7 +218,7 @@ proto-plus==1.23.0
# google-cloud-spanner
# google-cloud-videointelligence
# google-cloud-vision
-protobuf==4.25.3
+protobuf==4.25.8
# via
# apache-beam
# google-api-core
@@ -305,7 +305,7 @@ typing-extensions==4.10.0
# via apache-beam
tzlocal==5.2
# via js2py
-urllib3==2.2.2
+urllib3==2.6.0
# via requests
wrapt==1.16.0
# via deprecated
diff --git a/dataflow/gemma-flex-template/requirements.txt b/dataflow/gemma-flex-template/requirements.txt
index d19ddb3dacc..71966b2a122 100644
--- a/dataflow/gemma-flex-template/requirements.txt
+++ b/dataflow/gemma-flex-template/requirements.txt
@@ -1,7 +1,7 @@
# For reproducible builds, it is better to also include transitive dependencies:
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/c93accadf3bd29e9c3166676abb2c95564579c5e/dataflow/flex-templates/pipeline_with_dependencies/requirements.txt#L22,
# but for simplicity of this example, we are only including the top-level dependencies.
-apache_beam[gcp]==2.65.0
+apache_beam[gcp]==2.66.0
immutabledict==4.2.0
# Also required, please download and install gemma_pytorch.
diff --git a/dataproc/snippets/noxfile_config.py b/dataproc/snippets/noxfile_config.py
index 084fb0d01db..99f474dc0b6 100644
--- a/dataproc/snippets/noxfile_config.py
+++ b/dataproc/snippets/noxfile_config.py
@@ -22,7 +22,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7", "3.7", "3.9", "3.10", "3.11"],
+ "ignored_versions": ["2.7", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
# "enforce_type_hints": True,
diff --git a/dataproc/snippets/requirements.txt b/dataproc/snippets/requirements.txt
index be44f16d3e6..70297ad7006 100644
--- a/dataproc/snippets/requirements.txt
+++ b/dataproc/snippets/requirements.txt
@@ -1,8 +1,8 @@
backoff==2.2.1
-grpcio==1.70.0
+grpcio==1.74.0
google-auth==2.38.0
google-auth-httplib2==0.2.0
google-cloud==0.34.0
google-cloud-storage==2.9.0
-google-cloud-dataproc==5.4.3
+google-cloud-dataproc==5.20.0
diff --git a/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster.py b/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster.py
new file mode 100644
index 00000000000..45334c82ee0
--- /dev/null
+++ b/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample walks a user through submitting a Spark job to a
+# Dataproc driver node group cluster using the Dataproc
+# client library.
+
+# Usage:
+# python submit_pyspark_job_to_driver_node_group_cluster.py \
+# --project_id --region \
+# --cluster_name
+
+# [START dataproc_submit_pyspark_job_to_driver_node_group_cluster]
+
+import re
+
+from google.cloud import dataproc_v1 as dataproc
+from google.cloud import storage
+
+
+def submit_job(project_id, region, cluster_name):
+ """Submits a PySpark job to a Dataproc cluster with a driver node group.
+
+ Args:
+ project_id (str): The ID of the Google Cloud project.
+ region (str): The region where the Dataproc cluster is located.
+ cluster_name (str): The name of the Dataproc cluster.
+ """
+ # Create the job client.
+ job_client = dataproc.JobControllerClient(
+ client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
+ )
+
+ driver_scheduling_config = dataproc.DriverSchedulingConfig(
+ memory_mb=2048, # Example memory in MB
+ vcores=2, # Example number of vcores
+ )
+
+ # Create the job config. The main Python file URI points to the script in
+ # a Google Cloud Storage bucket.
+ job = {
+ "placement": {"cluster_name": cluster_name},
+ "pyspark_job": {
+ "main_python_file_uri": "gs://dataproc-examples/pyspark/hello-world/hello-world.py"
+ },
+ "driver_scheduling_config": driver_scheduling_config,
+ }
+
+ operation = job_client.submit_job_as_operation(
+ request={"project_id": project_id, "region": region, "job": job}
+ )
+ response = operation.result()
+
+ # Dataproc job output gets saved to the Google Cloud Storage bucket
+ # allocated to the job. Use a regex to obtain the bucket and blob info.
+ matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
+ if not matches:
+ raise ValueError(
+ f"Unexpected driver output URI: {response.driver_output_resource_uri}"
+ )
+
+ output = (
+ storage.Client()
+ .get_bucket(matches.group(1))
+ .blob(f"{matches.group(2)}.000000000")
+ .download_as_bytes()
+ .decode("utf-8")
+ )
+
+ print(f"Job finished successfully: {output}")
+
+
+# [END dataproc_submit_pyspark_job_to_driver_node_group_cluster]
+
+if __name__ == "__main__":
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ description="Submits a Spark job to a Dataproc driver node group cluster."
+ )
+ parser.add_argument(
+ "--project_id", help="The Google Cloud project ID.", required=True
+ )
+ parser.add_argument(
+ "--region",
+ help="The Dataproc region where the cluster is located.",
+ required=True,
+ )
+ parser.add_argument(
+ "--cluster_name", help="The name of the Dataproc cluster.", required=True
+ )
+
+ args = parser.parse_args()
+ submit_job(args.project_id, args.region, args.cluster_name)
diff --git a/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster_test.py b/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster_test.py
new file mode 100644
index 00000000000..38e3ebb24e3
--- /dev/null
+++ b/dataproc/snippets/submit_pyspark_job_to_driver_node_group_cluster_test.py
@@ -0,0 +1,88 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import uuid
+
+import backoff
+from google.api_core.exceptions import (
+ Aborted,
+ InternalServerError,
+ NotFound,
+ ServiceUnavailable,
+)
+from google.cloud import dataproc_v1 as dataproc
+
+import submit_pyspark_job_to_driver_node_group_cluster
+
+PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
+REGION = "us-central1"
+CLUSTER_NAME = f"py-ps-test-{str(uuid.uuid4())}"
+
+cluster_client = dataproc.ClusterControllerClient(
+ client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"}
+)
+
+
+@backoff.on_exception(backoff.expo, (Exception), max_tries=5)
+def teardown():
+ try:
+ operation = cluster_client.delete_cluster(
+ request={
+ "project_id": PROJECT_ID,
+ "region": REGION,
+ "cluster_name": CLUSTER_NAME,
+ }
+ )
+ # Wait for cluster to delete
+ operation.result()
+ except NotFound:
+ print("Cluster already deleted")
+
+
+@backoff.on_exception(
+ backoff.expo,
+ (
+ InternalServerError,
+ ServiceUnavailable,
+ Aborted,
+ ),
+ max_tries=5,
+)
+def test_workflows(capsys):
+ # Setup driver node group cluster. TODO: cleanup b/424371877
+ command = f"""gcloud dataproc clusters create {CLUSTER_NAME} \
+ --region {REGION} \
+ --project {PROJECT_ID} \
+ --driver-pool-size=1 \
+ --driver-pool-id=pytest"""
+
+ output = subprocess.run(
+ command,
+ capture_output=True,
+ shell=True,
+ check=True,
+ )
+ print(output)
+
+ # Wrapper function for client library function
+ submit_pyspark_job_to_driver_node_group_cluster.submit_job(
+ PROJECT_ID, REGION, CLUSTER_NAME
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Job finished successfully" in out
+
+ # cluster deleted in teardown()
diff --git a/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster.py b/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster.py
new file mode 100644
index 00000000000..9715736d1b1
--- /dev/null
+++ b/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample walks a user through submitting a Spark job to a
+# Dataproc driver node group cluster using the Dataproc
+# client library.
+
+# Usage:
+# python submit_spark_job_to_driver_node_group_cluster.py \
+# --project_id --region \
+# --cluster_name
+
+# [START dataproc_submit_spark_job_to_driver_node_group_cluster]
+
+import re
+
+from google.cloud import dataproc_v1 as dataproc
+from google.cloud import storage
+
+
+def submit_job(project_id: str, region: str, cluster_name: str) -> None:
+ """Submits a Spark job to the specified Dataproc cluster with a driver node group and prints the output.
+
+ Args:
+ project_id: The Google Cloud project ID.
+ region: The Dataproc region where the cluster is located.
+ cluster_name: The name of the Dataproc cluster.
+ """
+ # Create the job client.
+ with dataproc.JobControllerClient(
+ client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
+ ) as job_client:
+
+ driver_scheduling_config = dataproc.DriverSchedulingConfig(
+ memory_mb=2048, # Example memory in MB
+ vcores=2, # Example number of vcores
+ )
+
+ # Create the job config. 'main_jar_file_uri' can also be a
+ # Google Cloud Storage URL.
+ job = {
+ "placement": {"cluster_name": cluster_name},
+ "spark_job": {
+ "main_class": "org.apache.spark.examples.SparkPi",
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
+ "args": ["1000"],
+ },
+ "driver_scheduling_config": driver_scheduling_config
+ }
+
+ operation = job_client.submit_job_as_operation(
+ request={"project_id": project_id, "region": region, "job": job}
+ )
+
+ response = operation.result()
+
+ # Dataproc job output gets saved to the Cloud Storage bucket
+ # allocated to the job. Use a regex to obtain the bucket and blob info.
+ matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
+ if not matches:
+ print(f"Error: Could not parse driver output URI: {response.driver_output_resource_uri}")
+ raise ValueError
+
+ output = (
+ storage.Client()
+ .get_bucket(matches.group(1))
+ .blob(f"{matches.group(2)}.000000000")
+ .download_as_bytes()
+ .decode("utf-8")
+ )
+
+ print(f"Job finished successfully: {output}")
+
+# [END dataproc_submit_spark_job_to_driver_node_group_cluster]
+
+
+if __name__ == "__main__":
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ description="Submits a Spark job to a Dataproc driver node group cluster."
+ )
+ parser.add_argument("--project_id", help="The Google Cloud project ID.", required=True)
+ parser.add_argument("--region", help="The Dataproc region where the cluster is located.", required=True)
+ parser.add_argument("--cluster_name", help="The name of the Dataproc cluster.", required=True)
+
+ args = parser.parse_args()
+ submit_job(args.project_id, args.region, args.cluster_name)
diff --git a/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster_test.py b/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster_test.py
new file mode 100644
index 00000000000..ac642ed2e5a
--- /dev/null
+++ b/dataproc/snippets/submit_spark_job_to_driver_node_group_cluster_test.py
@@ -0,0 +1,88 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import uuid
+
+import backoff
+from google.api_core.exceptions import (
+ Aborted,
+ InternalServerError,
+ NotFound,
+ ServiceUnavailable,
+)
+from google.cloud import dataproc_v1 as dataproc
+
+import submit_spark_job_to_driver_node_group_cluster
+
+PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
+REGION = "us-central1"
+CLUSTER_NAME = f"py-ss-test-{str(uuid.uuid4())}"
+
+cluster_client = dataproc.ClusterControllerClient(
+ client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"}
+)
+
+
+@backoff.on_exception(backoff.expo, (Exception), max_tries=5)
+def teardown():
+ try:
+ operation = cluster_client.delete_cluster(
+ request={
+ "project_id": PROJECT_ID,
+ "region": REGION,
+ "cluster_name": CLUSTER_NAME,
+ }
+ )
+ # Wait for cluster to delete
+ operation.result()
+ except NotFound:
+ print("Cluster already deleted")
+
+
+@backoff.on_exception(
+ backoff.expo,
+ (
+ InternalServerError,
+ ServiceUnavailable,
+ Aborted,
+ ),
+ max_tries=5,
+)
+def test_workflows(capsys):
+ # Set up driver node group cluster. TODO: cleanup b/424371877
+ command = f"""gcloud dataproc clusters create {CLUSTER_NAME} \
+ --region {REGION} \
+ --project {PROJECT_ID} \
+ --driver-pool-size=1 \
+ --driver-pool-id=pytest"""
+
+ output = subprocess.run(
+ command,
+ capture_output=True,
+ shell=True,
+ check=True,
+ )
+ print(output)
+
+ # Wrapper function for client library function
+ submit_spark_job_to_driver_node_group_cluster.submit_job(
+ PROJECT_ID, REGION, CLUSTER_NAME
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Job finished successfully" in out
+
+ # cluster deleted in teardown()
diff --git a/datastore/cloud-ndb/requirements.txt b/datastore/cloud-ndb/requirements.txt
index 7444220cb6a..35949d51f53 100644
--- a/datastore/cloud-ndb/requirements.txt
+++ b/datastore/cloud-ndb/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-ndb==2.3.2
+google-cloud-ndb==2.3.4
Flask==3.0.3
Werkzeug==3.0.6
diff --git a/datastore/samples/snippets/requirements-test.txt b/datastore/samples/snippets/requirements-test.txt
new file mode 100644
index 00000000000..2a21e952015
--- /dev/null
+++ b/datastore/samples/snippets/requirements-test.txt
@@ -0,0 +1,7 @@
+backoff===1.11.1; python_version < "3.7"
+backoff==2.2.1; python_version >= "3.7"
+pytest===7.4.3; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest===8.4.2; python_version == '3.9'
+pytest==9.0.2; python_version >= '3.10'
+flaky==3.8.1
diff --git a/datastore/samples/snippets/requirements.txt b/datastore/samples/snippets/requirements.txt
new file mode 100644
index 00000000000..7852f23b24e
--- /dev/null
+++ b/datastore/samples/snippets/requirements.txt
@@ -0,0 +1 @@
+google-cloud-datastore==2.23.0
\ No newline at end of file
diff --git a/datastore/samples/snippets/schedule-export/README.md b/datastore/samples/snippets/schedule-export/README.md
new file mode 100644
index 00000000000..a8501cddc34
--- /dev/null
+++ b/datastore/samples/snippets/schedule-export/README.md
@@ -0,0 +1,5 @@
+# Scheduling Datastore exports with Cloud Functions and Cloud Scheduler
+
+This sample application demonstrates how to schedule exports of your Datastore entities. To deploy this sample, see:
+
+[Scheduling exports](https://cloud.google.com/datastore/docs/schedule-export)
diff --git a/datastore/samples/snippets/schedule-export/main.py b/datastore/samples/snippets/schedule-export/main.py
new file mode 100644
index 00000000000..f91b1466913
--- /dev/null
+++ b/datastore/samples/snippets/schedule-export/main.py
@@ -0,0 +1,57 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import json
+import os
+
+from google.cloud import datastore_admin_v1
+
+project_id = os.environ.get("GCP_PROJECT")
+client = datastore_admin_v1.DatastoreAdminClient()
+
+
+def datastore_export(event, context):
+ """Triggers a Datastore export from a Cloud Scheduler job.
+
+ Args:
+ event (dict): event[data] must contain a json object encoded in
+ base-64. Cloud Scheduler encodes payloads in base-64 by default.
+ Object must include a 'bucket' value and can include 'kinds'
+ and 'namespaceIds' values.
+ context (google.cloud.functions.Context): The Cloud Functions event
+ metadata.
+ """
+ if "data" in event:
+ # Triggered via Cloud Scheduler, decode the inner data field of the json payload.
+ json_data = json.loads(base64.b64decode(event["data"]).decode("utf-8"))
+ else:
+ # Otherwise, for instance if triggered via the Cloud Console on a Cloud Function, the event is the data.
+ json_data = event
+
+ bucket = json_data["bucket"]
+ entity_filter = datastore_admin_v1.EntityFilter()
+
+ if "kinds" in json_data:
+ entity_filter.kinds = json_data["kinds"]
+
+ if "namespaceIds" in json_data:
+ entity_filter.namespace_ids = json_data["namespaceIds"]
+
+ export_request = datastore_admin_v1.ExportEntitiesRequest(
+ project_id=project_id, output_url_prefix=bucket, entity_filter=entity_filter
+ )
+ operation = client.export_entities(request=export_request)
+ response = operation.result()
+ print(response)
diff --git a/datastore/samples/snippets/schedule-export/requirements-test.txt b/datastore/samples/snippets/schedule-export/requirements-test.txt
new file mode 100644
index 00000000000..cb982446b31
--- /dev/null
+++ b/datastore/samples/snippets/schedule-export/requirements-test.txt
@@ -0,0 +1,2 @@
+pytest===8.4.2; python_version == '3.9'
+pytest==9.0.2; python_version >= '3.10'
diff --git a/datastore/samples/snippets/schedule-export/requirements.txt b/datastore/samples/snippets/schedule-export/requirements.txt
new file mode 100644
index 00000000000..fa16c1e95ab
--- /dev/null
+++ b/datastore/samples/snippets/schedule-export/requirements.txt
@@ -0,0 +1 @@
+google-cloud-datastore==2.23.0
diff --git a/datastore/samples/snippets/schedule-export/schedule_export_test.py b/datastore/samples/snippets/schedule-export/schedule_export_test.py
new file mode 100644
index 00000000000..48d9147c923
--- /dev/null
+++ b/datastore/samples/snippets/schedule-export/schedule_export_test.py
@@ -0,0 +1,73 @@
+# Copyright 2019 Google LLC All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+from unittest.mock import Mock
+
+import main
+
+mock_context = Mock()
+mock_context.event_id = "617187464135194"
+mock_context.timestamp = "2020-04-15T22:09:03.761Z"
+
+
+def test_datastore_export(capsys):
+ # Test an export without an entity filter
+ bucket = "gs://my-bucket"
+ json_string = '{{ "bucket": "{bucket}" }}'.format(bucket=bucket)
+
+ # Encode data like Cloud Scheduler
+ data = bytes(json_string, "utf-8")
+ data_encoded = base64.b64encode(data)
+ event = {"data": data_encoded}
+
+ # Mock the Datastore service
+ mockDatastore = Mock()
+ main.client = mockDatastore
+
+ # Call tested function
+ main.datastore_export(event, mock_context)
+ out, err = capsys.readouterr()
+ export_args = mockDatastore.export_entities.call_args[1]
+ # Assert request includes test values
+ assert export_args["request"].output_url_prefix == bucket
+
+
+def test_datastore_export_entity_filter(capsys):
+ # Test an export with an entity filter
+ bucket = "gs://my-bucket"
+ kinds = "Users,Tasks"
+ namespaceIds = "Customer831,Customer157"
+ json_string = '{{ "bucket": "{bucket}", "kinds": "{kinds}", "namespaceIds": "{namespaceIds}" }}'.format(
+ bucket=bucket, kinds=kinds, namespaceIds=namespaceIds
+ )
+
+ # Encode data like Cloud Scheduler
+ data = bytes(json_string, "utf-8")
+ data_encoded = base64.b64encode(data)
+ event = {"data": data_encoded}
+
+ # Mock the Datastore service
+ mockDatastore = Mock()
+ main.client = mockDatastore
+
+ # Call tested function
+ main.datastore_export(event, mock_context)
+ out, err = capsys.readouterr()
+ export_args = mockDatastore.export_entities.call_args[1]
+ # Assert request includes test values
+
+ assert export_args["request"].output_url_prefix == bucket
+ assert export_args["request"].entity_filter.kinds == kinds
+ assert export_args["request"].entity_filter.namespace_ids == namespaceIds
diff --git a/datastore/samples/snippets/snippets.py b/datastore/samples/snippets/snippets.py
new file mode 100644
index 00000000000..1b86ba8b0cd
--- /dev/null
+++ b/datastore/samples/snippets/snippets.py
@@ -0,0 +1,513 @@
+# Copyright 2022 Google, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+from datetime import datetime, timedelta, timezone
+from pprint import pprint
+import time
+
+from google.cloud import datastore # noqa: I100
+
+
+def _preamble():
+ # [START datastore_size_coloration_query]
+ from google.cloud import datastore
+
+ # For help authenticating your client, visit
+ # https://cloud.google.com/docs/authentication/getting-started
+ client = datastore.Client()
+
+ # [END datastore_size_coloration_query]
+ assert client is not None
+
+
+def in_query(client):
+ # [START datastore_in_query]
+ query = client.query(kind="Task")
+ query.add_filter("tag", "IN", ["learn", "study"])
+ # [END datastore_in_query]
+
+ return list(query.fetch())
+
+
+def not_equals_query(client):
+ # [START datastore_not_equals_query]
+ query = client.query(kind="Task")
+ query.add_filter("category", "!=", "work")
+ # [END datastore_not_equals_query]
+
+ return list(query.fetch())
+
+
+def not_in_query(client):
+ # [START datastore_not_in_query]
+ query = client.query(kind="Task")
+ query.add_filter("category", "NOT_IN", ["work", "chores", "school"])
+ # [END datastore_not_in_query]
+
+ return list(query.fetch())
+
+
+def query_with_readtime(client):
+ # [START datastore_stale_read]
+ # Create a read time of 15 seconds in the past
+ read_time = datetime.now(timezone.utc) - timedelta(seconds=15)
+
+ # Fetch an entity with read_time
+ task_key = client.key("Task", "sampletask")
+ entity = client.get(task_key, read_time=read_time)
+
+ # Query Task entities with read_time
+ query = client.query(kind="Task")
+ tasks = query.fetch(read_time=read_time, limit=10)
+ # [END datastore_stale_read]
+
+ results = list(tasks)
+ results.append(entity)
+
+ return results
+
+
+def count_query_in_transaction(client):
+ # [START datastore_count_in_transaction]
+ task1 = datastore.Entity(client.key("Task", "task1"))
+ task2 = datastore.Entity(client.key("Task", "task2"))
+
+ task1["owner"] = "john"
+ task2["owner"] = "john"
+
+ tasks = [task1, task2]
+ client.put_multi(tasks)
+
+ with client.transaction() as transaction:
+
+ tasks_of_john = client.query(kind="Task")
+ tasks_of_john.add_filter("owner", "=", "john")
+ total_tasks_query = client.aggregation_query(tasks_of_john)
+
+ query_result = total_tasks_query.count(alias="tasks_count").fetch()
+ for task_result in query_result:
+ tasks_count = task_result[0]
+ if tasks_count.value < 2:
+ task3 = datastore.Entity(client.key("Task", "task3"))
+ task3["owner"] = "john"
+ transaction.put(task3)
+ tasks.append(task3)
+ else:
+ print(f"Found existing {tasks_count.value} tasks, rolling back")
+ client.entities_to_delete.extend(tasks)
+ raise ValueError("User 'John' cannot have more than 2 tasks")
+ # [END datastore_count_in_transaction]
+
+
+def count_query_on_kind(client):
+ # [START datastore_count_on_kind]
+ task1 = datastore.Entity(client.key("Task", "task1"))
+ task2 = datastore.Entity(client.key("Task", "task2"))
+
+ tasks = [task1, task2]
+ client.put_multi(tasks)
+ all_tasks_query = client.query(kind="Task")
+ all_tasks_count_query = client.aggregation_query(all_tasks_query).count()
+ query_result = all_tasks_count_query.fetch()
+ for aggregation_results in query_result:
+ for aggregation in aggregation_results:
+ print(f"Total tasks (accessible from default alias) is {aggregation.value}")
+ # [END datastore_count_on_kind]
+ return tasks
+
+
+def count_query_with_limit(client):
+ # [START datastore_count_with_limit]
+ task1 = datastore.Entity(client.key("Task", "task1"))
+ task2 = datastore.Entity(client.key("Task", "task2"))
+ task3 = datastore.Entity(client.key("Task", "task3"))
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+ all_tasks_query = client.query(kind="Task")
+ all_tasks_count_query = client.aggregation_query(all_tasks_query).count()
+ query_result = all_tasks_count_query.fetch(limit=2)
+ for aggregation_results in query_result:
+ for aggregation in aggregation_results:
+ print(f"We have at least {aggregation.value} tasks")
+ # [END datastore_count_with_limit]
+ return tasks
+
+
+def count_query_property_filter(client):
+ # [START datastore_count_with_property_filter]
+ task1 = datastore.Entity(client.key("Task", "task1"))
+ task2 = datastore.Entity(client.key("Task", "task2"))
+ task3 = datastore.Entity(client.key("Task", "task3"))
+
+ task1["done"] = True
+ task2["done"] = False
+ task3["done"] = True
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+ completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
+ remaining_tasks = client.query(kind="Task").add_filter("done", "=", False)
+
+ completed_tasks_query = client.aggregation_query(query=completed_tasks).count(
+ alias="total_completed_count"
+ )
+ remaining_tasks_query = client.aggregation_query(query=remaining_tasks).count(
+ alias="total_remaining_count"
+ )
+
+ completed_query_result = completed_tasks_query.fetch()
+ for aggregation_results in completed_query_result:
+ for aggregation_result in aggregation_results:
+ if aggregation_result.alias == "total_completed_count":
+ print(f"Total completed tasks count is {aggregation_result.value}")
+
+ remaining_query_result = remaining_tasks_query.fetch()
+ for aggregation_results in remaining_query_result:
+ for aggregation_result in aggregation_results:
+ if aggregation_result.alias == "total_remaining_count":
+ print(f"Total remaining tasks count is {aggregation_result.value}")
+ # [END datastore_count_with_property_filter]
+ return tasks
+
+
+def count_query_with_stale_read(client):
+
+ tasks = [task for task in client.query(kind="Task").fetch()]
+ client.delete_multi(tasks) # ensure the database is empty before starting
+
+ # [START datastore_count_query_with_stale_read]
+ task1 = datastore.Entity(client.key("Task", "task1"))
+ task2 = datastore.Entity(client.key("Task", "task2"))
+
+ # Saving two tasks
+ task1["done"] = True
+ task2["done"] = False
+ client.put_multi([task1, task2])
+ time.sleep(10)
+
+ past_timestamp = datetime.now(
+ timezone.utc
+ ) # we have two tasks in database at this time.
+ time.sleep(10)
+
+ # Saving third task
+ task3 = datastore.Entity(client.key("Task", "task3"))
+ task3["done"] = False
+ client.put(task3)
+
+ all_tasks = client.query(kind="Task")
+ all_tasks_count = client.aggregation_query(
+ query=all_tasks,
+ ).count(alias="all_tasks_count")
+
+ # Executing aggregation query
+ query_result = all_tasks_count.fetch()
+ for aggregation_results in query_result:
+ for aggregation_result in aggregation_results:
+ print(f"Latest tasks count is {aggregation_result.value}")
+
+ # Executing aggregation query with past timestamp
+ tasks_in_past = client.aggregation_query(query=all_tasks).count(
+ alias="tasks_in_past"
+ )
+ tasks_in_the_past_query_result = tasks_in_past.fetch(read_time=past_timestamp)
+ for aggregation_results in tasks_in_the_past_query_result:
+ for aggregation_result in aggregation_results:
+ print(f"Stale tasks count is {aggregation_result.value}")
+ # [END datastore_count_query_with_stale_read]
+ return [task1, task2, task3]
+
+
+def sum_query_on_kind(client):
+ # [START datastore_sum_aggregation_query_on_kind]
+ # Set up sample entities
+ # Use incomplete key to auto-generate ID
+ task1 = datastore.Entity(client.key("Task"))
+ task2 = datastore.Entity(client.key("Task"))
+ task3 = datastore.Entity(client.key("Task"))
+
+ task1["hours"] = 5
+ task2["hours"] = 3
+ task3["hours"] = 1
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+
+ # Execute sum aggregation query
+ all_tasks_query = client.query(kind="Task")
+ all_tasks_sum_query = client.aggregation_query(all_tasks_query).sum("hours")
+ query_result = all_tasks_sum_query.fetch()
+ for aggregation_results in query_result:
+ for aggregation in aggregation_results:
+ print(f"Total sum of hours in tasks is {aggregation.value}")
+ # [END datastore_sum_aggregation_query_on_kind]
+ return tasks
+
+
+def sum_query_property_filter(client):
+ # [START datastore_sum_aggregation_query_with_filters]
+ # Set up sample entities
+ # Use incomplete key to auto-generate ID
+ task1 = datastore.Entity(client.key("Task"))
+ task2 = datastore.Entity(client.key("Task"))
+ task3 = datastore.Entity(client.key("Task"))
+
+ task1["hours"] = 5
+ task2["hours"] = 3
+ task3["hours"] = 1
+
+ task1["done"] = True
+ task2["done"] = True
+ task3["done"] = False
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+
+ # Execute sum aggregation query with filters
+ completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
+ completed_tasks_query = client.aggregation_query(query=completed_tasks).sum(
+ property_ref="hours", alias="total_completed_sum_hours"
+ )
+
+ completed_query_result = completed_tasks_query.fetch()
+ for aggregation_results in completed_query_result:
+ for aggregation_result in aggregation_results:
+ if aggregation_result.alias == "total_completed_sum_hours":
+ print(
+ f"Total sum of hours in completed tasks is {aggregation_result.value}"
+ )
+ # [END datastore_sum_aggregation_query_with_filters]
+ return tasks
+
+
+def avg_query_on_kind(client):
+ # [START datastore_avg_aggregation_query_on_kind]
+ # Set up sample entities
+ # Use incomplete key to auto-generate ID
+ task1 = datastore.Entity(client.key("Task"))
+ task2 = datastore.Entity(client.key("Task"))
+ task3 = datastore.Entity(client.key("Task"))
+
+ task1["hours"] = 5
+ task2["hours"] = 3
+ task3["hours"] = 1
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+
+ # Execute average aggregation query
+ all_tasks_query = client.query(kind="Task")
+ all_tasks_avg_query = client.aggregation_query(all_tasks_query).avg("hours")
+ query_result = all_tasks_avg_query.fetch()
+ for aggregation_results in query_result:
+ for aggregation in aggregation_results:
+ print(f"Total average of hours in tasks is {aggregation.value}")
+ # [END datastore_avg_aggregation_query_on_kind]
+ return tasks
+
+
+def avg_query_property_filter(client):
+ # [START datastore_avg_aggregation_query_with_filters]
+ # Set up sample entities
+ # Use incomplete key to auto-generate ID
+ task1 = datastore.Entity(client.key("Task"))
+ task2 = datastore.Entity(client.key("Task"))
+ task3 = datastore.Entity(client.key("Task"))
+
+ task1["hours"] = 5
+ task2["hours"] = 3
+ task3["hours"] = 1
+
+ task1["done"] = True
+ task2["done"] = True
+ task3["done"] = False
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+
+ # Execute average aggregation query with filters
+ completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
+ completed_tasks_query = client.aggregation_query(query=completed_tasks).avg(
+ property_ref="hours", alias="total_completed_avg_hours"
+ )
+
+ completed_query_result = completed_tasks_query.fetch()
+ for aggregation_results in completed_query_result:
+ for aggregation_result in aggregation_results:
+ if aggregation_result.alias == "total_completed_avg_hours":
+ print(
+ f"Total average of hours in completed tasks is {aggregation_result.value}"
+ )
+ # [END datastore_avg_aggregation_query_with_filters]
+ return tasks
+
+
+def multiple_aggregations_query(client):
+ # [START datastore_multiple_aggregation_in_structured_query]
+ # Set up sample entities
+ # Use incomplete key to auto-generate ID
+ task1 = datastore.Entity(client.key("Task"))
+ task2 = datastore.Entity(client.key("Task"))
+ task3 = datastore.Entity(client.key("Task"))
+
+ task1["hours"] = 5
+ task2["hours"] = 3
+ task3["hours"] = 1
+
+ tasks = [task1, task2, task3]
+ client.put_multi(tasks)
+
+ # Execute query with multiple aggregations
+ all_tasks_query = client.query(kind="Task")
+ aggregation_query = client.aggregation_query(all_tasks_query)
+ # Add aggregations
+ aggregation_query.add_aggregations(
+ [
+ datastore.aggregation.CountAggregation(alias="count_aggregation"),
+ datastore.aggregation.SumAggregation(
+ property_ref="hours", alias="sum_aggregation"
+ ),
+ datastore.aggregation.AvgAggregation(
+ property_ref="hours", alias="avg_aggregation"
+ ),
+ ]
+ )
+
+ query_result = aggregation_query.fetch()
+ for aggregation_results in query_result:
+ for aggregation in aggregation_results:
+ print(f"{aggregation.alias} value is {aggregation.value}")
+ # [END datastore_multiple_aggregation_in_structured_query]
+ return tasks
+
+
+def explain_analyze_entity(client):
+ # [START datastore_query_explain_analyze_entity]
+ # Build the query with explain_options
+ # analyze = true to get back the query stats, plan info, and query results
+ query = client.query(
+ kind="Task", explain_options=datastore.ExplainOptions(analyze=True)
+ )
+
+ # initiate the query
+ iterator = query.fetch()
+
+ # explain_metrics is only available after query is completed
+ for task_result in iterator:
+ print(task_result)
+
+ # get the plan summary
+ plan_summary = iterator.explain_metrics.plan_summary
+ print(f"Indexes used: {plan_summary.indexes_used}")
+
+ # get the execution stats
+ execution_stats = iterator.explain_metrics.execution_stats
+ print(f"Results returned: {execution_stats.results_returned}")
+ print(f"Execution duration: {execution_stats.execution_duration}")
+ print(f"Read operations: {execution_stats.read_operations}")
+ print(f"Debug stats: {execution_stats.debug_stats}")
+ # [END datastore_query_explain_analyze_entity]
+
+
+def explain_entity(client):
+ # [START datastore_query_explain_entity]
+ # Build the query with explain_options
+ # by default (analyze = false), only plan_summary property is available
+ query = client.query(kind="Task", explain_options=datastore.ExplainOptions())
+
+ # initiate the query
+ iterator = query.fetch()
+
+ # get the plan summary
+ plan_summary = iterator.explain_metrics.plan_summary
+ print(f"Indexes used: {plan_summary.indexes_used}")
+ # [END datastore_query_explain_entity]
+
+
+def explain_analyze_aggregation(client):
+ # [START datastore_query_explain_analyze_aggregation]
+ # Build the aggregation query with explain_options
+ # analyze = true to get back the query stats, plan info, and query results
+ all_tasks_query = client.query(kind="Task")
+ count_query = client.aggregation_query(
+ all_tasks_query, explain_options=datastore.ExplainOptions(analyze=True)
+ ).count()
+
+ # initiate the query
+ iterator = count_query.fetch()
+
+ # explain_metrics is only available after query is completed
+ for task_result in iterator:
+ print(task_result)
+
+ # get the plan summary
+ plan_summary = iterator.explain_metrics.plan_summary
+ print(f"Indexes used: {plan_summary.indexes_used}")
+
+ # get the execution stats
+ execution_stats = iterator.explain_metrics.execution_stats
+ print(f"Results returned: {execution_stats.results_returned}")
+ print(f"Execution duration: {execution_stats.execution_duration}")
+ print(f"Read operations: {execution_stats.read_operations}")
+ print(f"Debug stats: {execution_stats.debug_stats}")
+ # [END datastore_query_explain_analyze_aggregation]
+
+
+def explain_aggregation(client):
+ # [START datastore_query_explain_aggregation]
+ # Build the aggregation query with explain_options
+ # by default (analyze = false), only plan_summary property is available
+ all_tasks_query = client.query(kind="Task")
+ count_query = client.aggregation_query(
+ all_tasks_query, explain_options=datastore.ExplainOptions()
+ ).count()
+
+ # initiate the query
+ iterator = count_query.fetch()
+
+ # get the plan summary
+ plan_summary = iterator.explain_metrics.plan_summary
+ print(f"Indexes used: {plan_summary.indexes_used}")
+ # [END datastore_query_explain_aggregation]
+
+
+def main(project_id):
+ client = datastore.Client(project_id)
+
+ for name, function in globals().items():
+ if name in (
+ "main",
+ "_preamble",
+ "defaultdict",
+ "datetime",
+ "timezone",
+ "timedelta",
+ ) or not callable(function):
+ continue
+
+ print(name)
+ pprint(function(client))
+ print("\n-----------------\n")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Demonstrates datastore API operations."
+ )
+ parser.add_argument("project_id", help="Your cloud project ID.")
+
+ args = parser.parse_args()
+
+ main(args.project_id)
diff --git a/datastore/samples/snippets/snippets_test.py b/datastore/samples/snippets/snippets_test.py
new file mode 100644
index 00000000000..ae3b2948b34
--- /dev/null
+++ b/datastore/samples/snippets/snippets_test.py
@@ -0,0 +1,249 @@
+# Copyright 2022 Google, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import backoff
+import google.api_core.exceptions
+from google.cloud import datastore
+from google.cloud import datastore_admin_v1
+import pytest
+
+import snippets
+
+PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
+
+
+class CleanupClient(datastore.Client):
+ def __init__(self, *args, **kwargs):
+ super(CleanupClient, self).__init__(*args, **kwargs)
+ self.entities_to_delete = []
+ self.keys_to_delete = []
+
+ def cleanup(self):
+ with self.batch():
+ self.delete_multi(
+ list(set([x.key for x in self.entities_to_delete if x]))
+ + list(set(self.keys_to_delete))
+ )
+
+
+@pytest.fixture
+def client():
+ client = CleanupClient(PROJECT)
+ yield client
+ client.cleanup()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def setup_indexes(request):
+ # Set up required indexes
+ admin_client = datastore_admin_v1.DatastoreAdminClient()
+
+ indexes = []
+ done_property_index = datastore_admin_v1.Index.IndexedProperty(
+ name="done", direction=datastore_admin_v1.Index.Direction.ASCENDING
+ )
+ hour_property_index = datastore_admin_v1.Index.IndexedProperty(
+ name="hours", direction=datastore_admin_v1.Index.Direction.ASCENDING
+ )
+ done_hour_index = datastore_admin_v1.Index(
+ kind="Task",
+ ancestor=datastore_admin_v1.Index.AncestorMode.NONE,
+ properties=[done_property_index, hour_property_index],
+ )
+ indexes.append(done_hour_index)
+
+ for index in indexes:
+ request = datastore_admin_v1.CreateIndexRequest(project_id=PROJECT, index=index)
+ # Create the required index
+ # Dependent tests will fail until the index is ready
+ try:
+ admin_client.create_index(request)
+ # Pass if the index already exists
+ except (google.api_core.exceptions.AlreadyExists):
+ pass
+
+
+@pytest.mark.flaky
+class TestDatastoreSnippets:
+ # These tests mostly just test the absence of exceptions.
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_in_query(self, client):
+ tasks = snippets.in_query(client)
+ client.entities_to_delete.extend(tasks)
+ assert tasks is not None
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_not_equals_query(self, client):
+ tasks = snippets.not_equals_query(client)
+ client.entities_to_delete.extend(tasks)
+ assert tasks is not None
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_not_in_query(self, client):
+ tasks = snippets.not_in_query(client)
+ client.entities_to_delete.extend(tasks)
+ assert tasks is not None
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_query_with_readtime(self, client):
+ tasks = snippets.query_with_readtime(client)
+ client.entities_to_delete.extend(tasks)
+ assert tasks is not None
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_count_query_in_transaction(self, client):
+ with pytest.raises(ValueError) as excinfo:
+ snippets.count_query_in_transaction(client)
+ assert "User 'John' cannot have more than 2 tasks" in str(excinfo.value)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_count_query_on_kind(self, capsys, client):
+ tasks = snippets.count_query_on_kind(client)
+ captured = capsys.readouterr()
+ assert (
+ captured.out.strip() == "Total tasks (accessible from default alias) is 2"
+ )
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_count_query_with_limit(self, capsys, client):
+ tasks = snippets.count_query_with_limit(client)
+ captured = capsys.readouterr()
+ assert captured.out.strip() == "We have at least 2 tasks"
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_count_query_property_filter(self, capsys, client):
+ tasks = snippets.count_query_property_filter(client)
+ captured = capsys.readouterr()
+
+ assert "Total completed tasks count is 2" in captured.out
+ assert "Total remaining tasks count is 1" in captured.out
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_count_query_with_stale_read(self, capsys, client):
+ tasks = snippets.count_query_with_stale_read(client)
+ captured = capsys.readouterr()
+
+ assert "Latest tasks count is 3" in captured.out
+ assert "Stale tasks count is 2" in captured.out
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_sum_query_on_kind(self, capsys, client):
+ tasks = snippets.sum_query_on_kind(client)
+ captured = capsys.readouterr()
+ assert captured.out.strip() == "Total sum of hours in tasks is 9"
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_sum_query_property_filter(self, capsys, client):
+ tasks = snippets.sum_query_property_filter(client)
+ captured = capsys.readouterr()
+ assert captured.out.strip() == "Total sum of hours in completed tasks is 8"
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_avg_query_on_kind(self, capsys, client):
+ tasks = snippets.avg_query_on_kind(client)
+ captured = capsys.readouterr()
+ assert captured.out.strip() == "Total average of hours in tasks is 3.0"
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_avg_query_property_filter(self, capsys, client):
+ tasks = snippets.avg_query_property_filter(client)
+ captured = capsys.readouterr()
+ assert (
+ captured.out.strip() == "Total average of hours in completed tasks is 4.0"
+ )
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_multiple_aggregations_query(self, capsys, client):
+ tasks = snippets.multiple_aggregations_query(client)
+ captured = capsys.readouterr()
+ assert "avg_aggregation value is 3.0" in captured.out
+ assert "count_aggregation value is 3" in captured.out
+ assert "sum_aggregation value is 9" in captured.out
+ assert captured.err == ""
+
+ client.entities_to_delete.extend(tasks)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_explain_analyze_entity(self, capsys, client):
+ snippets.explain_analyze_entity(client)
+ captured = capsys.readouterr()
+ assert (
+ "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]"
+ in captured.out
+ )
+ assert "Results returned: 0" in captured.out
+ assert "Execution duration: 0:00" in captured.out
+ assert "Read operations: 0" in captured.out
+ assert "Debug stats: {" in captured.out
+ assert captured.err == ""
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_explain_entity(self, capsys, client):
+ snippets.explain_entity(client)
+ captured = capsys.readouterr()
+ assert (
+ "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]"
+ in captured.out
+ )
+ assert captured.err == ""
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_explain_analyze_aggregation(self, capsys, client):
+ snippets.explain_analyze_aggregation(client)
+ captured = capsys.readouterr()
+ assert (
+ "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]"
+ in captured.out
+ )
+ assert "Results returned: 1" in captured.out
+ assert "Execution duration: 0:00" in captured.out
+ assert "Read operations: 1" in captured.out
+ assert "Debug stats: {" in captured.out
+ assert captured.err == ""
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
+ def test_explain_aggregation(self, capsys, client):
+ snippets.explain_aggregation(client)
+ captured = capsys.readouterr()
+ assert (
+ "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]"
+ in captured.out
+ )
+ assert captured.err == ""
diff --git a/dialogflow-cx/noxfile_config.py b/dialogflow-cx/noxfile_config.py
index 462f6d428f7..cc8143940ee 100644
--- a/dialogflow-cx/noxfile_config.py
+++ b/dialogflow-cx/noxfile_config.py
@@ -22,7 +22,7 @@
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
- "ignored_versions": ["2.7", "3.7", "3.9", "3.10", "3.11", "3.12", "3.13"],
+ "ignored_versions": ["2.7", "3.7", "3.8", "3.9", "3.11", "3.12", "3.13"],
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
diff --git a/dialogflow-cx/requirements.txt b/dialogflow-cx/requirements.txt
index 2f69fb9a0dc..5c29bf4a7bf 100644
--- a/dialogflow-cx/requirements.txt
+++ b/dialogflow-cx/requirements.txt
@@ -1,8 +1,8 @@
-google-cloud-dialogflow-cx==1.38.0
+google-cloud-dialogflow-cx==2.0.0
Flask==3.0.3
python-dateutil==2.9.0.post0
-functions-framework==3.8.2
-Werkzeug==3.0.6
+functions-framework==3.9.2
+Werkzeug==3.1.5
termcolor==3.0.0; python_version >= "3.9"
termcolor==2.4.0; python_version == "3.8"
pyaudio==0.2.14
\ No newline at end of file
diff --git a/dialogflow/requirements.txt b/dialogflow/requirements.txt
index 8b6f00fa3f7..4c7d355eb45 100644
--- a/dialogflow/requirements.txt
+++ b/dialogflow/requirements.txt
@@ -2,5 +2,5 @@ google-cloud-dialogflow==2.36.0
Flask==3.0.3
pyaudio==0.2.14
termcolor==3.0.0
-functions-framework==3.8.2
+functions-framework==3.9.2
Werkzeug==3.0.6
diff --git a/discoveryengine/answer_query_sample.py b/discoveryengine/answer_query_sample.py
index 5f546ab6f7d..fcb47bff6b8 100644
--- a/discoveryengine/answer_query_sample.py
+++ b/discoveryengine/answer_query_sample.py
@@ -69,7 +69,7 @@ def answer_query_sample(
ignore_non_answer_seeking_query=False, # Optional: Ignore non-answer seeking query
ignore_low_relevant_content=False, # Optional: Return fallback answer when content is not relevant
model_spec=discoveryengine.AnswerQueryRequest.AnswerGenerationSpec.ModelSpec(
- model_version="gemini-2.0-flash-001/answer_gen/v1", # Optional: Model to use for answer generation
+ model_version="gemini-2.5-flash/answer_gen/v1", # Optional: Model to use for answer generation
),
prompt_spec=discoveryengine.AnswerQueryRequest.AnswerGenerationSpec.PromptSpec(
preamble="Give a detailed answer.", # Optional: Natural language instructions for customizing the answer.
@@ -85,6 +85,7 @@ def answer_query_sample(
session=None, # Optional: include previous session ID to continue a conversation
query_understanding_spec=query_understanding_spec,
answer_generation_spec=answer_generation_spec,
+ user_pseudo_id="user-pseudo-id", # Optional: Add user pseudo-identifier for queries.
)
# Make the request
diff --git a/discoveryengine/documents_sample_test.py b/discoveryengine/documents_sample_test.py
index 1e1b6af84db..c94d56e59c2 100644
--- a/discoveryengine/documents_sample_test.py
+++ b/discoveryengine/documents_sample_test.py
@@ -26,6 +26,7 @@
data_store_id = "test-structured-data-engine"
+@pytest.mark.skip(reason="Table deleted.")
def test_import_documents_bigquery():
# Empty Dataset
bigquery_dataset = "genappbuilder_test"
diff --git a/discoveryengine/requirements.txt b/discoveryengine/requirements.txt
index 10d05966ed6..0adc48717bf 100644
--- a/discoveryengine/requirements.txt
+++ b/discoveryengine/requirements.txt
@@ -1 +1 @@
-google-cloud-discoveryengine==0.13.8
+google-cloud-discoveryengine==0.13.11
diff --git a/discoveryengine/session_sample.py b/discoveryengine/session_sample.py
index a4744dfe9d1..e92a0cf97aa 100644
--- a/discoveryengine/session_sample.py
+++ b/discoveryengine/session_sample.py
@@ -37,7 +37,7 @@ def create_session(
discoveryengine.Session: The newly created Session.
"""
- client = discoveryengine.ConversationalSearchServiceClient()
+ client = discoveryengine.SessionServiceClient()
session = client.create_session(
# The full resource name of the engine
@@ -71,7 +71,7 @@ def get_session(
session_id: The ID of the session.
"""
- client = discoveryengine.ConversationalSearchServiceClient()
+ client = discoveryengine.SessionServiceClient()
# The full resource name of the session
name = f"projects/{project_id}/locations/{location}/collections/default_collection/engines/{engine_id}/sessions/{session_id}"
@@ -104,7 +104,7 @@ def delete_session(
session_id: The ID of the session.
"""
- client = discoveryengine.ConversationalSearchServiceClient()
+ client = discoveryengine.SessionServiceClient()
# The full resource name of the session
name = f"projects/{project_id}/locations/{location}/collections/default_collection/engines/{engine_id}/sessions/{session_id}"
@@ -138,7 +138,7 @@ def update_session(
Returns:
discoveryengine.Session: The updated Session.
"""
- client = discoveryengine.ConversationalSearchServiceClient()
+ client = discoveryengine.SessionServiceClient()
# The full resource name of the session
name = f"projects/{project_id}/locations/{location}/collections/default_collection/engines/{engine_id}/sessions/{session_id}"
@@ -178,7 +178,7 @@ def list_sessions(
discoveryengine.ListSessionsResponse: The list of sessions.
"""
- client = discoveryengine.ConversationalSearchServiceClient()
+ client = discoveryengine.SessionServiceClient()
# The full resource name of the engine
parent = f"projects/{project_id}/locations/{location}/collections/default_collection/engines/{engine_id}"
diff --git a/discoveryengine/standalone_apis_sample.py b/discoveryengine/standalone_apis_sample.py
index 3c8673d27a5..1a0ff112904 100644
--- a/discoveryengine/standalone_apis_sample.py
+++ b/discoveryengine/standalone_apis_sample.py
@@ -94,7 +94,7 @@ def rank_sample(
)
request = discoveryengine.RankRequest(
ranking_config=ranking_config,
- model="semantic-ranker-512@latest",
+ model="semantic-ranker-default@latest",
top_n=10,
query="What is Google Gemini?",
records=[
@@ -123,3 +123,183 @@ def rank_sample(
# [END genappbuilder_rank]
return response
+
+
+def grounded_generation_inline_vais_sample(
+ project_number: str,
+ engine_id: str,
+) -> discoveryengine.GenerateGroundedContentResponse:
+ # [START genappbuilder_grounded_generation_inline_vais]
+ from google.cloud import discoveryengine_v1 as discoveryengine
+
+ # TODO(developer): Uncomment these variables before running the sample.
+ # project_number = "YOUR_PROJECT_NUMBER"
+ # engine_id = "YOUR_ENGINE_ID"
+
+ client = discoveryengine.GroundedGenerationServiceClient()
+
+ request = discoveryengine.GenerateGroundedContentRequest(
+ # The full resource name of the location.
+ # Format: projects/{project_number}/locations/{location}
+ location=client.common_location_path(project=project_number, location="global"),
+ generation_spec=discoveryengine.GenerateGroundedContentRequest.GenerationSpec(
+ model_id="gemini-2.5-flash",
+ ),
+ # Conversation between user and model
+ contents=[
+ discoveryengine.GroundedGenerationContent(
+ role="user",
+ parts=[
+ discoveryengine.GroundedGenerationContent.Part(
+ text="How did Google do in 2020? Where can I find BigQuery docs?"
+ )
+ ],
+ )
+ ],
+ system_instruction=discoveryengine.GroundedGenerationContent(
+ parts=[
+ discoveryengine.GroundedGenerationContent.Part(
+ text="Add a smiley emoji after the answer."
+ )
+ ],
+ ),
+ # What to ground on.
+ grounding_spec=discoveryengine.GenerateGroundedContentRequest.GroundingSpec(
+ grounding_sources=[
+ discoveryengine.GenerateGroundedContentRequest.GroundingSource(
+ inline_source=discoveryengine.GenerateGroundedContentRequest.GroundingSource.InlineSource(
+ grounding_facts=[
+ discoveryengine.GroundingFact(
+ fact_text=(
+ "The BigQuery documentation can be found at https://cloud.google.com/bigquery/docs/introduction"
+ ),
+ attributes={
+ "title": "BigQuery Overview",
+ "uri": "https://cloud.google.com/bigquery/docs/introduction",
+ },
+ ),
+ ]
+ ),
+ ),
+ discoveryengine.GenerateGroundedContentRequest.GroundingSource(
+ search_source=discoveryengine.GenerateGroundedContentRequest.GroundingSource.SearchSource(
+ # The full resource name of the serving config for a Vertex AI Search App
+ serving_config=f"projects/{project_number}/locations/global/collections/default_collection/engines/{engine_id}/servingConfigs/default_search",
+ ),
+ ),
+ ]
+ ),
+ )
+ response = client.generate_grounded_content(request)
+
+ # Handle the response
+ print(response)
+ # [END genappbuilder_grounded_generation_inline_vais]
+
+ return response
+
+
+def grounded_generation_google_search_sample(
+ project_number: str,
+) -> discoveryengine.GenerateGroundedContentResponse:
+ # [START genappbuilder_grounded_generation_google_search]
+ from google.cloud import discoveryengine_v1 as discoveryengine
+
+ # TODO(developer): Uncomment these variables before running the sample.
+ # project_number = "YOUR_PROJECT_NUMBER"
+
+ client = discoveryengine.GroundedGenerationServiceClient()
+
+ request = discoveryengine.GenerateGroundedContentRequest(
+ # The full resource name of the location.
+ # Format: projects/{project_number}/locations/{location}
+ location=client.common_location_path(project=project_number, location="global"),
+ generation_spec=discoveryengine.GenerateGroundedContentRequest.GenerationSpec(
+ model_id="gemini-2.5-flash",
+ ),
+ # Conversation between user and model
+ contents=[
+ discoveryengine.GroundedGenerationContent(
+ role="user",
+ parts=[
+ discoveryengine.GroundedGenerationContent.Part(
+ text="How much is Google stock?"
+ )
+ ],
+ )
+ ],
+ system_instruction=discoveryengine.GroundedGenerationContent(
+ parts=[
+ discoveryengine.GroundedGenerationContent.Part(text="Be comprehensive.")
+ ],
+ ),
+ # What to ground on.
+ grounding_spec=discoveryengine.GenerateGroundedContentRequest.GroundingSpec(
+ grounding_sources=[
+ discoveryengine.GenerateGroundedContentRequest.GroundingSource(
+ google_search_source=discoveryengine.GenerateGroundedContentRequest.GroundingSource.GoogleSearchSource(
+ # Optional: For Dynamic Retrieval
+ dynamic_retrieval_config=discoveryengine.GenerateGroundedContentRequest.DynamicRetrievalConfiguration(
+ predictor=discoveryengine.GenerateGroundedContentRequest.DynamicRetrievalConfiguration.DynamicRetrievalPredictor(
+ threshold=0.7
+ )
+ )
+ )
+ ),
+ ]
+ ),
+ )
+ response = client.generate_grounded_content(request)
+
+ # Handle the response
+ print(response)
+ # [END genappbuilder_grounded_generation_google_search]
+
+ return response
+
+
+def grounded_generation_streaming_sample(
+ project_number: str,
+) -> discoveryengine.GenerateGroundedContentResponse:
+ # [START genappbuilder_grounded_generation_streaming]
+ from google.cloud import discoveryengine_v1 as discoveryengine
+
+    # TODO(developer): Uncomment these variables before running the sample.
+    # project_number = "YOUR_PROJECT_NUMBER"
+
+ client = discoveryengine.GroundedGenerationServiceClient()
+
+ request = discoveryengine.GenerateGroundedContentRequest(
+ # The full resource name of the location.
+ # Format: projects/{project_number}/locations/{location}
+ location=client.common_location_path(project=project_number, location="global"),
+ generation_spec=discoveryengine.GenerateGroundedContentRequest.GenerationSpec(
+ model_id="gemini-2.5-flash",
+ ),
+ # Conversation between user and model
+ contents=[
+ discoveryengine.GroundedGenerationContent(
+ role="user",
+ parts=[
+ discoveryengine.GroundedGenerationContent.Part(
+ text="Summarize how to delete a data store in Vertex AI Agent Builder?"
+ )
+ ],
+ )
+ ],
+ grounding_spec=discoveryengine.GenerateGroundedContentRequest.GroundingSpec(
+ grounding_sources=[
+ discoveryengine.GenerateGroundedContentRequest.GroundingSource(
+ google_search_source=discoveryengine.GenerateGroundedContentRequest.GroundingSource.GoogleSearchSource()
+ ),
+ ]
+ ),
+ )
+ responses = client.stream_generate_grounded_content(iter([request]))
+
+ for response in responses:
+ # Handle the response
+ print(response)
+ # [END genappbuilder_grounded_generation_streaming]
+
+ return response
diff --git a/discoveryengine/standalone_apis_sample_test.py b/discoveryengine/standalone_apis_sample_test.py
index f0c00cb937d..60405afd7db 100644
--- a/discoveryengine/standalone_apis_sample_test.py
+++ b/discoveryengine/standalone_apis_sample_test.py
@@ -17,6 +17,8 @@
from discoveryengine import standalone_apis_sample
+from google.cloud import resourcemanager_v3
+
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
@@ -32,3 +34,27 @@ def test_rank():
response = standalone_apis_sample.rank_sample(project_id)
assert response
assert response.records
+
+
+def test_grounded_generation_inline_vais_sample():
+ # Grounded Generation requires Project Number
+ client = resourcemanager_v3.ProjectsClient()
+ project = client.get_project(name=client.project_path(project_id))
+ project_number = client.parse_project_path(project.name)["project"]
+
+ response = standalone_apis_sample.grounded_generation_inline_vais_sample(
+ project_number, engine_id="test-search-engine_1689960780551"
+ )
+ assert response
+
+
+def test_grounded_generation_google_search_sample():
+ # Grounded Generation requires Project Number
+ client = resourcemanager_v3.ProjectsClient()
+ project = client.get_project(name=client.project_path(project_id))
+ project_number = client.parse_project_path(project.name)["project"]
+
+ response = standalone_apis_sample.grounded_generation_google_search_sample(
+ project_number
+ )
+ assert response
diff --git a/endpoints/getting-started/clients/service_to_service_gae_default/main.py b/endpoints/getting-started/clients/service_to_service_gae_default/main.py
index 0eb54639e00..5af1ed9b83b 100644
--- a/endpoints/getting-started/clients/service_to_service_gae_default/main.py
+++ b/endpoints/getting-started/clients/service_to_service_gae_default/main.py
@@ -16,11 +16,11 @@
Google App Engine Default Service Account."""
import base64
-import httplib
import json
import time
from google.appengine.api import app_identity
+import httplib
import webapp2
DEFAULT_SERVICE_ACCOUNT = "YOUR-CLIENT-PROJECT-ID@appspot.gserviceaccount.com"
diff --git a/endpoints/getting-started/clients/service_to_service_google_id_token/main.py b/endpoints/getting-started/clients/service_to_service_google_id_token/main.py
index c19c625a958..a8faa5647d4 100644
--- a/endpoints/getting-started/clients/service_to_service_google_id_token/main.py
+++ b/endpoints/getting-started/clients/service_to_service_google_id_token/main.py
@@ -16,12 +16,12 @@
Default Service Account using Google ID token."""
import base64
-import httplib
import json
import time
import urllib
from google.appengine.api import app_identity
+import httplib
import webapp2
SERVICE_ACCOUNT_EMAIL = "YOUR-CLIENT-PROJECT-ID@appspot.gserviceaccount.com"
diff --git a/endpoints/getting-started/clients/service_to_service_non_default/main.py b/endpoints/getting-started/clients/service_to_service_non_default/main.py
index b42406c57d0..77426b58d80 100644
--- a/endpoints/getting-started/clients/service_to_service_non_default/main.py
+++ b/endpoints/getting-started/clients/service_to_service_non_default/main.py
@@ -16,12 +16,12 @@
Service Account."""
import base64
-import httplib
import json
import time
import google.auth.app_engine
import googleapiclient.discovery
+import httplib
import webapp2
SERVICE_ACCOUNT_EMAIL = "YOUR-SERVICE-ACCOUNT-EMAIL"
diff --git a/firestore/cloud-async-client/snippets.py b/firestore/cloud-async-client/snippets.py
index b0a97962cc5..3a7b9476941 100644
--- a/firestore/cloud-async-client/snippets.py
+++ b/firestore/cloud-async-client/snippets.py
@@ -693,24 +693,16 @@ async def delete_full_collection():
db = firestore.AsyncClient()
# [START firestore_data_delete_collection_async]
- async def delete_collection(coll_ref, batch_size):
- docs = coll_ref.limit(batch_size).stream()
- deleted = 0
+ async def delete_collection(coll_ref):
- async for doc in docs:
- print(f"Deleting doc {doc.id} => {doc.to_dict()}")
- await doc.reference.delete()
- deleted = deleted + 1
-
- if deleted >= batch_size:
- return delete_collection(coll_ref, batch_size)
+ await db.recursive_delete(coll_ref)
# [END firestore_data_delete_collection_async]
- await delete_collection(db.collection("cities"), 10)
- await delete_collection(db.collection("data"), 10)
- await delete_collection(db.collection("objects"), 10)
- await delete_collection(db.collection("users"), 10)
+ await delete_collection(db.collection("cities"))
+ await delete_collection(db.collection("data"))
+ await delete_collection(db.collection("objects"))
+ await delete_collection(db.collection("users"))
async def collection_group_query(db):
diff --git a/firestore/cloud-client/snippets.py b/firestore/cloud-client/snippets.py
index 09dff308a50..9bc64d1c383 100644
--- a/firestore/cloud-client/snippets.py
+++ b/firestore/cloud-client/snippets.py
@@ -839,28 +839,17 @@ def delete_full_collection():
db = firestore.Client()
# [START firestore_data_delete_collection]
- def delete_collection(coll_ref, batch_size):
- if batch_size == 0:
- return
+ def delete_collection(coll_ref):
- docs = coll_ref.list_documents(page_size=batch_size)
- deleted = 0
-
- for doc in docs:
- print(f"Deleting doc {doc.id} => {doc.get().to_dict()}")
- doc.delete()
- deleted = deleted + 1
-
- if deleted >= batch_size:
- return delete_collection(coll_ref, batch_size)
+ print(f"Recursively deleting collection: {coll_ref}")
+ db.recursive_delete(coll_ref)
# [END firestore_data_delete_collection]
- delete_collection(db.collection("cities"), 10)
- delete_collection(db.collection("data"), 10)
- delete_collection(db.collection("objects"), 10)
- delete_collection(db.collection("users"), 10)
- delete_collection(db.collection("users"), 0)
+ delete_collection(db.collection("cities"))
+ delete_collection(db.collection("data"))
+ delete_collection(db.collection("objects"))
+ delete_collection(db.collection("users"))
def collection_group_query(db):
diff --git a/functions/bigtable/requirements.txt b/functions/bigtable/requirements.txt
index 8b72b7e9f54..3799ff092d5 100644
--- a/functions/bigtable/requirements.txt
+++ b/functions/bigtable/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-bigtable==2.27.0
diff --git a/functions/billing/main.py b/functions/billing/main.py
index 518347c69d8..317d91842bf 100644
--- a/functions/billing/main.py
+++ b/functions/billing/main.py
@@ -14,37 +14,28 @@
# [START functions_billing_limit]
# [START functions_billing_limit_appengine]
-# [START functions_billing_stop]
# [START functions_billing_slack]
import base64
import json
import os
-
-# [END functions_billing_stop]
# [END functions_billing_limit]
# [END functions_billing_limit_appengine]
# [END functions_billing_slack]
# [START functions_billing_limit]
# [START functions_billing_limit_appengine]
-# [START functions_billing_stop]
from googleapiclient import discovery
-
-# [END functions_billing_stop]
# [END functions_billing_limit]
# [END functions_billing_limit_appengine]
# [START functions_billing_slack]
import slack
from slack.errors import SlackApiError
-
# [END functions_billing_slack]
# [START functions_billing_limit]
-# [START functions_billing_stop]
PROJECT_ID = os.getenv("GCP_PROJECT")
PROJECT_NAME = f"projects/{PROJECT_ID}"
-# [END functions_billing_stop]
# [END functions_billing_limit]
# [START functions_billing_slack]
@@ -86,7 +77,6 @@ def notify_slack(data, context):
# [END functions_billing_slack]
-# [START functions_billing_stop]
def stop_billing(data, context):
pubsub_data = base64.b64decode(data["data"]).decode("utf-8")
pubsub_json = json.loads(pubsub_data)
@@ -148,9 +138,6 @@ def __disable_billing_for_project(project_name, projects):
print("Failed to disable billing, possibly check permissions")
-# [END functions_billing_stop]
-
-
# [START functions_billing_limit]
ZONE = "us-west1-b"
diff --git a/functions/billing_stop_on_notification/requirements.txt b/functions/billing_stop_on_notification/requirements.txt
index 912b07cd0a3..b730a52aa07 100644
--- a/functions/billing_stop_on_notification/requirements.txt
+++ b/functions/billing_stop_on_notification/requirements.txt
@@ -1,3 +1,5 @@
+# [START functions_billing_stop_requirements]
functions-framework==3.*
google-cloud-billing==1.16.2
google-cloud-logging==3.12.1
+# [END functions_billing_stop_requirements]
diff --git a/functions/concepts-after-timeout/requirements.txt b/functions/concepts-after-timeout/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/concepts-after-timeout/requirements.txt
+++ b/functions/concepts-after-timeout/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/concepts-filesystem/requirements.txt b/functions/concepts-filesystem/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/concepts-filesystem/requirements.txt
+++ b/functions/concepts-filesystem/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/concepts-requests/requirements.txt b/functions/concepts-requests/requirements.txt
index 97d8ec7f997..e8dc91f5eb5 100644
--- a/functions/concepts-requests/requirements.txt
+++ b/functions/concepts-requests/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
requests==2.31.0
diff --git a/functions/concepts-stateless/requirements-test.txt b/functions/concepts-stateless/requirements-test.txt
index dc5fe349e81..06c13ca892f 100644
--- a/functions/concepts-stateless/requirements-test.txt
+++ b/functions/concepts-stateless/requirements-test.txt
@@ -1,3 +1,3 @@
flask==3.0.3
pytest==8.2.0
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/concepts-stateless/requirements.txt b/functions/concepts-stateless/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/concepts-stateless/requirements.txt
+++ b/functions/concepts-stateless/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/helloworld/requirements-test.txt b/functions/helloworld/requirements-test.txt
index ed2b31ccff8..6031c4d8ee4 100644
--- a/functions/helloworld/requirements-test.txt
+++ b/functions/helloworld/requirements-test.txt
@@ -1,3 +1,3 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
pytest==8.2.0
uuid==1.30
diff --git a/functions/helloworld/requirements.txt b/functions/helloworld/requirements.txt
index 3ea2c88c384..8c9cb7ea6d4 100644
--- a/functions/helloworld/requirements.txt
+++ b/functions/helloworld/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
flask==3.0.3
google-cloud-error-reporting==1.11.1
MarkupSafe==2.1.3
diff --git a/functions/http/requirements.txt b/functions/http/requirements.txt
index 53e544093b7..49c6c6065c1 100644
--- a/functions/http/requirements.txt
+++ b/functions/http/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-storage==2.9.0; python_version < '3.7'
google-cloud-storage==2.9.0; python_version > '3.6'
xmltodict==0.13.0
diff --git a/functions/memorystore/redis/requirements.txt b/functions/memorystore/redis/requirements.txt
index f9b248cdd90..8719dde06fc 100644
--- a/functions/memorystore/redis/requirements.txt
+++ b/functions/memorystore/redis/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
redis==6.0.0
diff --git a/functions/slack/requirements.txt b/functions/slack/requirements.txt
index 9abacb043e7..a6d5d05bb78 100644
--- a/functions/slack/requirements.txt
+++ b/functions/slack/requirements.txt
@@ -1,4 +1,4 @@
google-api-python-client==2.131.0
flask==3.0.3
-functions-framework==3.5.0
+functions-framework==3.9.2
slackclient==2.9.4
diff --git a/functions/spanner/requirements.txt b/functions/spanner/requirements.txt
index 47337520a80..139fa6462a3 100644
--- a/functions/spanner/requirements.txt
+++ b/functions/spanner/requirements.txt
@@ -1,2 +1,2 @@
google-cloud-spanner==3.51.0
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/tips-connection-pooling/requirements.txt b/functions/tips-connection-pooling/requirements.txt
index d258643ded1..a267b387ca6 100644
--- a/functions/tips-connection-pooling/requirements.txt
+++ b/functions/tips-connection-pooling/requirements.txt
@@ -1,2 +1,2 @@
requests==2.31.0
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/tips-gcp-apis/requirements.txt b/functions/tips-gcp-apis/requirements.txt
index 95daf02ad85..b4c1c4018a4 100644
--- a/functions/tips-gcp-apis/requirements.txt
+++ b/functions/tips-gcp-apis/requirements.txt
@@ -1,2 +1,2 @@
google-cloud-pubsub==2.28.0
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/tips-lazy-globals/main.py b/functions/tips-lazy-globals/main.py
index a9e23d902b2..9c36ac5724d 100644
--- a/functions/tips-lazy-globals/main.py
+++ b/functions/tips-lazy-globals/main.py
@@ -51,7 +51,7 @@ def lazy_globals(request):
Response object using `make_response`
.
"""
- global lazy_global, non_lazy_global
+ global lazy_global, non_lazy_global # noqa: F824
# This value is initialized only if (and when) the function is called
if not lazy_global:
diff --git a/functions/tips-lazy-globals/requirements.txt b/functions/tips-lazy-globals/requirements.txt
index f5b37113ca8..e923e1ec3a5 100644
--- a/functions/tips-lazy-globals/requirements.txt
+++ b/functions/tips-lazy-globals/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/tips-scopes/requirements.txt b/functions/tips-scopes/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/tips-scopes/requirements.txt
+++ b/functions/tips-scopes/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/v2/audit_log/requirements.txt b/functions/v2/audit_log/requirements.txt
index f5b37113ca8..e923e1ec3a5 100644
--- a/functions/v2/audit_log/requirements.txt
+++ b/functions/v2/audit_log/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/v2/datastore/hello-datastore/requirements.txt b/functions/v2/datastore/hello-datastore/requirements.txt
index 4afb5b152da..35e86dbfbc5 100644
--- a/functions/v2/datastore/hello-datastore/requirements.txt
+++ b/functions/v2/datastore/hello-datastore/requirements.txt
@@ -1,6 +1,6 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-events==0.14.0
google-cloud-datastore==2.20.2
google-api-core==2.17.1
-protobuf==4.25.6
+protobuf==4.25.8
cloudevents==1.11.0
diff --git a/functions/v2/firebase/hello-firestore/requirements.txt b/functions/v2/firebase/hello-firestore/requirements.txt
index 635adb54080..b2d03f648de 100644
--- a/functions/v2/firebase/hello-firestore/requirements.txt
+++ b/functions/v2/firebase/hello-firestore/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-events==0.14.0
google-api-core==2.17.1
protobuf==4.25.6
diff --git a/functions/v2/firebase/hello-remote-config/requirements.txt b/functions/v2/firebase/hello-remote-config/requirements.txt
index e0dd9dcd8bc..7404d8b7887 100644
--- a/functions/v2/firebase/hello-remote-config/requirements.txt
+++ b/functions/v2/firebase/hello-remote-config/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
cloudevents==1.11.0
\ No newline at end of file
diff --git a/functions/v2/firebase/hello-rtdb/requirements.txt b/functions/v2/firebase/hello-rtdb/requirements.txt
index e0dd9dcd8bc..7404d8b7887 100644
--- a/functions/v2/firebase/hello-rtdb/requirements.txt
+++ b/functions/v2/firebase/hello-rtdb/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
cloudevents==1.11.0
\ No newline at end of file
diff --git a/functions/v2/firebase/upper-firestore/requirements.txt b/functions/v2/firebase/upper-firestore/requirements.txt
index daf869fa8d3..cc5c66225f4 100644
--- a/functions/v2/firebase/upper-firestore/requirements.txt
+++ b/functions/v2/firebase/upper-firestore/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-events==0.14.0
google-api-core==2.17.1
protobuf==4.25.6
diff --git a/functions/v2/http_logging/requirements.txt b/functions/v2/http_logging/requirements.txt
index 845296cfe8a..1fa9b20e822 100644
--- a/functions/v2/http_logging/requirements.txt
+++ b/functions/v2/http_logging/requirements.txt
@@ -1,2 +1,2 @@
google-cloud-logging==3.11.4
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/v2/imagemagick/requirements.txt b/functions/v2/imagemagick/requirements.txt
index f00e4b306ee..26540b76df1 100644
--- a/functions/v2/imagemagick/requirements.txt
+++ b/functions/v2/imagemagick/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-vision==3.8.1
google-cloud-storage==2.9.0; python_version < '3.7'
google-cloud-storage==2.9.0; python_version > '3.6'
diff --git a/functions/v2/log/helloworld/requirements.txt b/functions/v2/log/helloworld/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/v2/log/helloworld/requirements.txt
+++ b/functions/v2/log/helloworld/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/v2/log/stackdriver/requirements.txt b/functions/v2/log/stackdriver/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/v2/log/stackdriver/requirements.txt
+++ b/functions/v2/log/stackdriver/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/v2/ocr/requirements.txt b/functions/v2/ocr/requirements.txt
index ee2b12cb5d1..bb768f4a45b 100644
--- a/functions/v2/ocr/requirements.txt
+++ b/functions/v2/ocr/requirements.txt
@@ -1,4 +1,4 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-pubsub==2.28.0
google-cloud-storage==2.9.0
google-cloud-translate==3.18.0
diff --git a/functions/v2/pubsub/requirements.txt b/functions/v2/pubsub/requirements.txt
index f5b37113ca8..e923e1ec3a5 100644
--- a/functions/v2/pubsub/requirements.txt
+++ b/functions/v2/pubsub/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
\ No newline at end of file
+functions-framework==3.9.2
\ No newline at end of file
diff --git a/functions/v2/response_streaming/requirements.txt b/functions/v2/response_streaming/requirements.txt
index 3027361675c..56da3662b54 100644
--- a/functions/v2/response_streaming/requirements.txt
+++ b/functions/v2/response_streaming/requirements.txt
@@ -1,5 +1,5 @@
Flask==2.2.2
-functions-framework==3.8.2
+functions-framework==3.9.2
google-cloud-bigquery==3.27.0
pytest==8.2.0
Werkzeug==2.3.8
diff --git a/functions/v2/storage/requirements.txt b/functions/v2/storage/requirements.txt
index e0dd9dcd8bc..7404d8b7887 100644
--- a/functions/v2/storage/requirements.txt
+++ b/functions/v2/storage/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
cloudevents==1.11.0
\ No newline at end of file
diff --git a/functions/v2/tips-avoid-infinite-retries/requirements.txt b/functions/v2/tips-avoid-infinite-retries/requirements.txt
index f1a1d8d7dab..0ec1dec6818 100644
--- a/functions/v2/tips-avoid-infinite-retries/requirements.txt
+++ b/functions/v2/tips-avoid-infinite-retries/requirements.txt
@@ -1,2 +1,2 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
python-dateutil==2.9.0.post0
diff --git a/functions/v2/tips-retry/requirements.txt b/functions/v2/tips-retry/requirements.txt
index 07fe1647ccf..adb62565b72 100644
--- a/functions/v2/tips-retry/requirements.txt
+++ b/functions/v2/tips-retry/requirements.txt
@@ -1,2 +1,2 @@
google-cloud-error-reporting==1.11.1
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/v2/typed/googlechatbot/requirements.txt b/functions/v2/typed/googlechatbot/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/v2/typed/googlechatbot/requirements.txt
+++ b/functions/v2/typed/googlechatbot/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/functions/v2/typed/greeting/requirements.txt b/functions/v2/typed/greeting/requirements.txt
index bb8882c4cff..0e1e6cbe66a 100644
--- a/functions/v2/typed/greeting/requirements.txt
+++ b/functions/v2/typed/greeting/requirements.txt
@@ -1 +1 @@
-functions-framework==3.8.2
+functions-framework==3.9.2
diff --git a/gemma2/requirements.txt b/gemma2/requirements.txt
index 824654c39a6..f8990233d3f 100644
--- a/gemma2/requirements.txt
+++ b/gemma2/requirements.txt
@@ -1,2 +1,2 @@
google-cloud-aiplatform[all]==1.64.0
-protobuf==5.29.4
+protobuf==5.29.5
diff --git a/genai/batch_prediction/batchpredict_embeddings_with_gcs.py b/genai/batch_prediction/batchpredict_embeddings_with_gcs.py
index 41420db3141..4fb8148e9f5 100644
--- a/genai/batch_prediction/batchpredict_embeddings_with_gcs.py
+++ b/genai/batch_prediction/batchpredict_embeddings_with_gcs.py
@@ -34,7 +34,7 @@ def generate_content(output_uri: str) -> str:
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
- # Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
+ # Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING
# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
diff --git a/genai/batch_prediction/batchpredict_with_bq.py b/genai/batch_prediction/batchpredict_with_bq.py
index b3d3db1c752..bf051f2a223 100644
--- a/genai/batch_prediction/batchpredict_with_bq.py
+++ b/genai/batch_prediction/batchpredict_with_bq.py
@@ -28,14 +28,14 @@ def generate_content(output_uri: str) -> str:
job = client.batches.create(
# To use a tuned model, set the model param to your tuned model using the following format:
# model="projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}
- model="gemini-2.0-flash-001",
+ model="gemini-2.5-flash",
src="bq://storage-samples.generative_ai.batch_requests_for_multimodal_input",
config=CreateBatchJobConfig(dest=output_uri),
)
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
- # Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
+ # Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING
# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
diff --git a/genai/batch_prediction/batchpredict_with_gcs.py b/genai/batch_prediction/batchpredict_with_gcs.py
index 280c29506a9..fcedf217bdc 100644
--- a/genai/batch_prediction/batchpredict_with_gcs.py
+++ b/genai/batch_prediction/batchpredict_with_gcs.py
@@ -28,7 +28,7 @@ def generate_content(output_uri: str) -> str:
job = client.batches.create(
# To use a tuned model, set the model param to your tuned model using the following format:
# model="projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}
- model="gemini-2.0-flash-001",
+ model="gemini-2.5-flash",
# Source link: https://storage.cloud.google.com/cloud-samples-data/batch/prompt_for_batch_gemini_predict.jsonl
src="gs://cloud-samples-data/batch/prompt_for_batch_gemini_predict.jsonl",
config=CreateBatchJobConfig(dest=output_uri),
@@ -36,7 +36,7 @@ def generate_content(output_uri: str) -> str:
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
- # Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
+ # Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING
# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
diff --git a/genai/batch_prediction/get_batch_job.py b/genai/batch_prediction/get_batch_job.py
new file mode 100644
index 00000000000..c6e0453da64
--- /dev/null
+++ b/genai/batch_prediction/get_batch_job.py
@@ -0,0 +1,43 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai import types
+
+
+def get_batch_job(batch_job_name: str) -> types.BatchJob:
+ # [START googlegenaisdk_batch_job_get]
+ from google import genai
+ from google.genai.types import HttpOptions
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1"))
+
+ # Get the batch job
+    # Eg. batch_job_name = "projects/123456789012/locations/.../batchPredictionJobs/1234567890123456789"
+ batch_job = client.batches.get(name=batch_job_name)
+
+ print(f"Job state: {batch_job.state}")
+ # Example response:
+ # Job state: JOB_STATE_PENDING
+ # Job state: JOB_STATE_RUNNING
+ # Job state: JOB_STATE_SUCCEEDED
+
+ # [END googlegenaisdk_batch_job_get]
+ return batch_job
+
+
+if __name__ == "__main__":
+ try:
+ get_batch_job(input("Batch job name: "))
+ except Exception as e:
+ print(f"An error occurred: {e}")
diff --git a/genai/batch_prediction/requirements-test.txt b/genai/batch_prediction/requirements-test.txt
index 937db8fb0d5..e43b7792721 100644
--- a/genai/batch_prediction/requirements-test.txt
+++ b/genai/batch_prediction/requirements-test.txt
@@ -1,4 +1,2 @@
google-api-core==2.24.0
-google-cloud-bigquery==3.29.0
-google-cloud-storage==2.19.0
pytest==8.2.0
diff --git a/genai/batch_prediction/requirements.txt b/genai/batch_prediction/requirements.txt
index 7890f90e26a..4f44a6593bb 100644
--- a/genai/batch_prediction/requirements.txt
+++ b/genai/batch_prediction/requirements.txt
@@ -1 +1,3 @@
-google-genai==1.16.1
+google-cloud-bigquery==3.29.0
+google-cloud-storage==2.19.0
+google-genai==1.42.0
diff --git a/genai/batch_prediction/test_batch_prediction_examples.py b/genai/batch_prediction/test_batch_prediction_examples.py
index f9979c352f6..5079dfd2cd0 100644
--- a/genai/batch_prediction/test_batch_prediction_examples.py
+++ b/genai/batch_prediction/test_batch_prediction_examples.py
@@ -11,69 +11,124 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from unittest.mock import MagicMock, patch
-#
-# Using Google Cloud Vertex AI to test the code samples.
-#
-
-from datetime import datetime as dt
-
-import os
-
-from google.cloud import bigquery, storage
+from google.genai import types
from google.genai.types import JobState
-import pytest
-
import batchpredict_embeddings_with_gcs
import batchpredict_with_bq
import batchpredict_with_gcs
+import get_batch_job
+
+@patch("google.genai.Client")
+@patch("time.sleep", return_value=None)
+def test_batch_prediction_embeddings_with_gcs(
+ mock_sleep: MagicMock, mock_genai_client: MagicMock
+) -> None:
+ # Mock the API response
+ mock_batch_job_running = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_RUNNING"
+ )
+ mock_batch_job_succeeded = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_SUCCEEDED"
+ )
-os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
-# The project name is included in the CICD pipeline
-# os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"
-BQ_OUTPUT_DATASET = f"{os.environ['GOOGLE_CLOUD_PROJECT']}.gen_ai_batch_prediction"
-GCS_OUTPUT_BUCKET = "python-docs-samples-tests"
+ mock_genai_client.return_value.batches.create.return_value = (
+ mock_batch_job_running
+ )
+ mock_genai_client.return_value.batches.get.return_value = (
+ mock_batch_job_succeeded
+ )
+
+ response = batchpredict_embeddings_with_gcs.generate_content(
+ output_uri="gs://test-bucket/test-prefix"
+ )
+
+ mock_genai_client.assert_called_once_with(
+ http_options=types.HttpOptions(api_version="v1")
+ )
+ mock_genai_client.return_value.batches.create.assert_called_once()
+ mock_genai_client.return_value.batches.get.assert_called_once()
+ assert response == JobState.JOB_STATE_SUCCEEDED
-@pytest.fixture(scope="session")
-def bq_output_uri() -> str:
- table_name = f"text_output_{dt.now().strftime('%Y_%m_%d_T%H_%M_%S')}"
- table_uri = f"{BQ_OUTPUT_DATASET}.{table_name}"
+@patch("google.genai.Client")
+@patch("time.sleep", return_value=None)
+def test_batch_prediction_with_bq(
+ mock_sleep: MagicMock, mock_genai_client: MagicMock
+) -> None:
+ # Mock the API response
+ mock_batch_job_running = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_RUNNING"
+ )
+ mock_batch_job_succeeded = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_SUCCEEDED"
+ )
- yield f"bq://{table_uri}"
+ mock_genai_client.return_value.batches.create.return_value = (
+ mock_batch_job_running
+ )
+ mock_genai_client.return_value.batches.get.return_value = (
+ mock_batch_job_succeeded
+ )
- bq_client = bigquery.Client()
- bq_client.delete_table(table_uri, not_found_ok=True)
+ response = batchpredict_with_bq.generate_content(
+ output_uri="bq://test-project.test_dataset.test_table"
+ )
+ mock_genai_client.assert_called_once_with(
+ http_options=types.HttpOptions(api_version="v1")
+ )
+ mock_genai_client.return_value.batches.create.assert_called_once()
+ mock_genai_client.return_value.batches.get.assert_called_once()
+ assert response == JobState.JOB_STATE_SUCCEEDED
-@pytest.fixture(scope="session")
-def gcs_output_uri() -> str:
- prefix = f"text_output/{dt.now()}"
- yield f"gs://{GCS_OUTPUT_BUCKET}/{prefix}"
+@patch("google.genai.Client")
+@patch("time.sleep", return_value=None)
+def test_batch_prediction_with_gcs(
+ mock_sleep: MagicMock, mock_genai_client: MagicMock
+) -> None:
+ # Mock the API response
+ mock_batch_job_running = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_RUNNING"
+ )
+ mock_batch_job_succeeded = types.BatchJob(
+ name="test-batch-job", state="JOB_STATE_SUCCEEDED"
+ )
- storage_client = storage.Client()
- bucket = storage_client.get_bucket(GCS_OUTPUT_BUCKET)
- blobs = bucket.list_blobs(prefix=prefix)
- for blob in blobs:
- blob.delete()
+ mock_genai_client.return_value.batches.create.return_value = (
+ mock_batch_job_running
+ )
+ mock_genai_client.return_value.batches.get.return_value = (
+ mock_batch_job_succeeded
+ )
+ response = batchpredict_with_gcs.generate_content(
+ output_uri="gs://test-bucket/test-prefix"
+ )
-def test_batch_prediction_embeddings_with_gcs(gcs_output_uri: str) -> None:
- response = batchpredict_embeddings_with_gcs.generate_content(
- output_uri=gcs_output_uri
+ mock_genai_client.assert_called_once_with(
+ http_options=types.HttpOptions(api_version="v1")
)
+ mock_genai_client.return_value.batches.create.assert_called_once()
+ mock_genai_client.return_value.batches.get.assert_called_once()
assert response == JobState.JOB_STATE_SUCCEEDED
-def test_batch_prediction_with_bq(bq_output_uri: str) -> None:
- response = batchpredict_with_bq.generate_content(output_uri=bq_output_uri)
- assert response == JobState.JOB_STATE_SUCCEEDED
+@patch("google.genai.Client")
+def test_get_batch_job(mock_genai_client: MagicMock) -> None:
+ # Mock the API response
+ mock_batch_job = types.BatchJob(name="test-batch-job", state="JOB_STATE_PENDING")
+ mock_genai_client.return_value.batches.get.return_value = mock_batch_job
-def test_batch_prediction_with_gcs(gcs_output_uri: str) -> None:
- response = batchpredict_with_gcs.generate_content(output_uri=gcs_output_uri)
- assert response == JobState.JOB_STATE_SUCCEEDED
+ response = get_batch_job.get_batch_job("test-batch-job")
+
+ mock_genai_client.assert_called_once_with(
+ http_options=types.HttpOptions(api_version="v1")
+ )
+ mock_genai_client.return_value.batches.get.assert_called_once()
+ assert response == mock_batch_job
diff --git a/genai/bounding_box/boundingbox_with_txt_img.py b/genai/bounding_box/boundingbox_with_txt_img.py
index 5ced23c25e1..a22f15dc664 100644
--- a/genai/bounding_box/boundingbox_with_txt_img.py
+++ b/genai/bounding_box/boundingbox_with_txt_img.py
@@ -16,12 +16,16 @@
def generate_content() -> str:
# [START googlegenaisdk_boundingbox_with_txt_img]
import requests
-
from google import genai
- from google.genai.types import GenerateContentConfig, HttpOptions, Part, SafetySetting
-
+ from google.genai.types import (
+ GenerateContentConfig,
+ HarmBlockThreshold,
+ HarmCategory,
+ HttpOptions,
+ Part,
+ SafetySetting,
+ )
from PIL import Image, ImageColor, ImageDraw
-
from pydantic import BaseModel
# Helper class to represent a bounding box
@@ -31,7 +35,7 @@ class BoundingBox(BaseModel):
Attributes:
box_2d (list[int]): A list of integers representing the 2D coordinates of the bounding box,
- typically in the format [x_min, y_min, x_max, y_max].
+ typically in the format [y_min, x_min, y_max, x_max].
label (str): A string representing the label or class associated with the object within the bounding box.
"""
@@ -41,12 +45,12 @@ class BoundingBox(BaseModel):
# Helper function to plot bounding boxes on an image
def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> None:
"""
- Plots bounding boxes on an image with markers for each a name, using PIL, normalized coordinates, and different colors.
+ Plots bounding boxes on an image with labels, using PIL and normalized coordinates.
Args:
- img_path: The path to the image file.
- bounding_boxes: A list of bounding boxes containing the name of the object
- and their positions in normalized [y1 x1 y2 x2] format.
+ image_uri: The URI of the image file.
+ bounding_boxes: A list of BoundingBox objects. Each box's coordinates are in
+ normalized [y_min, x_min, y_max, x_max] format.
"""
with Image.open(requests.get(image_uri, stream=True, timeout=10).raw) as im:
width, height = im.size
@@ -55,19 +59,23 @@ def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> No
colors = list(ImageColor.colormap.keys())
for i, bbox in enumerate(bounding_boxes):
- y1, x1, y2, x2 = bbox.box_2d
- abs_y1 = int(y1 / 1000 * height)
- abs_x1 = int(x1 / 1000 * width)
- abs_y2 = int(y2 / 1000 * height)
- abs_x2 = int(x2 / 1000 * width)
+ # Scale normalized coordinates to image dimensions
+ abs_y_min = int(bbox.box_2d[0] / 1000 * height)
+ abs_x_min = int(bbox.box_2d[1] / 1000 * width)
+ abs_y_max = int(bbox.box_2d[2] / 1000 * height)
+ abs_x_max = int(bbox.box_2d[3] / 1000 * width)
color = colors[i % len(colors)]
+ # Draw the rectangle using the correct (x, y) pairs
draw.rectangle(
- ((abs_x1, abs_y1), (abs_x2, abs_y2)), outline=color, width=4
+ ((abs_x_min, abs_y_min), (abs_x_max, abs_y_max)),
+ outline=color,
+ width=4,
)
if bbox.label:
- draw.text((abs_x1 + 8, abs_y1 + 6), bbox.label, fill=color)
+ # Position the text at the top-left corner of the box
+ draw.text((abs_x_min + 8, abs_y_min + 6), bbox.label, fill=color)
im.show()
@@ -83,18 +91,18 @@ def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> No
temperature=0.5,
safety_settings=[
SafetySetting(
- category="HARM_CATEGORY_DANGEROUS_CONTENT",
- threshold="BLOCK_ONLY_HIGH",
+ category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
+ threshold=HarmBlockThreshold.BLOCK_ONLY_HIGH,
),
],
response_mime_type="application/json",
- response_schema=list[BoundingBox], # Add BoundingBox class to the response schema
+ response_schema=list[BoundingBox],
)
image_uri = "https://storage.googleapis.com/generativeai-downloads/images/socks.jpg"
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
Part.from_uri(
file_uri=image_uri,
@@ -109,8 +117,8 @@ def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> No
# Example response:
# [
- # {"box_2d": [36, 246, 380, 492], "label": "top left sock with face"},
- # {"box_2d": [260, 663, 640, 917], "label": "top right sock with face"},
+ # {"box_2d": [6, 246, 386, 526], "label": "top-left light blue sock with cat face"},
+ # {"box_2d": [234, 649, 650, 863], "label": "top-right light blue sock with cat face"},
# ]
# [END googlegenaisdk_boundingbox_with_txt_img]
return response.text
diff --git a/genai/bounding_box/requirements.txt b/genai/bounding_box/requirements.txt
index 9653154bf93..86da356810f 100644
--- a/genai/bounding_box/requirements.txt
+++ b/genai/bounding_box/requirements.txt
@@ -1,2 +1,2 @@
-google-genai==1.16.1
+google-genai==1.42.0
pillow==11.1.0
diff --git a/genai/code_execution/codeexecution_annotateimage_with_txt_gcsimg.py b/genai/code_execution/codeexecution_annotateimage_with_txt_gcsimg.py
new file mode 100644
index 00000000000..a81f62c8491
--- /dev/null
+++ b/genai/code_execution/codeexecution_annotateimage_with_txt_gcsimg.py
@@ -0,0 +1,150 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> bool:
+ # [START googlegenaisdk_codeexecution_annotateimage_with_txt_gcsimg]
+ import io
+ from PIL import Image
+ from google import genai
+ from google.genai import types
+
+ client = genai.Client()
+
+ response = client.models.generate_content(
+ model="gemini-3-flash-preview",
+ contents=[
+ types.Part.from_uri(
+ file_uri="https://storage.googleapis.com/cloud-samples-data/generative-ai/image/robotic.jpeg",
+                mime_type="image/jpeg",
+ ),
+ "Annotate on the image with arrows of different colors, which object should go into which bin.",
+ ],
+ config=types.GenerateContentConfig(tools=[types.Tool(code_execution=types.ToolCodeExecution)]),
+ )
+
+ img_count = 0
+ for part in response.candidates[0].content.parts:
+ if part.text is not None:
+ print(part.text)
+ if part.executable_code is not None:
+ print("####################### 1. Generate Python Code #######################")
+ print(part.executable_code.code)
+ if part.code_execution_result is not None:
+ print("####################### 2. Executing Python Code #######################")
+ print(part.code_execution_result.output)
+ # For local executions, save the output to a local filename
+ if part.as_image() is not None:
+ print("####################### 3. Save Output #######################")
+ img_count += 1
+ output_location = f"robotic-annotate-output-{img_count}.jpg"
+ image_data = part.as_image().image_bytes
+ image = Image.open(io.BytesIO(image_data))
+ image = image.convert("RGB")
+ image.save(output_location)
+ print(f"Output is saved to {output_location}")
+ # Example response:
+ # ####################### 1. Generate Python Code #######################
+ # import PIL.Image
+ # import PIL.ImageDraw
+ #
+ # # Load the image to get dimensions
+ # img = PIL.Image.open('f_https___storage.googleapis.com_cloud_samples_data_generative_ai_image_robotic.jpeg')
+ # width, height = img.size
+ #
+ # # Define objects and bins with normalized coordinates [ymin, xmin, ymax, xmax]
+ # bins = {
+ # 'light_blue': [118, 308, 338, 436],
+ # 'green': [248, 678, 458, 831],
+ # 'black': [645, 407, 898, 578]
+ # }
+ #
+ # objects = [
+ # {'name': 'green pepper', 'box': [256, 482, 296, 546], 'target': 'green'},
+ # {'name': 'red pepper', 'box': [317, 478, 349, 544], 'target': 'green'},
+ # {'name': 'grapes', 'box': [584, 555, 664, 593], 'target': 'green'},
+ # {'name': 'cherries', 'box': [463, 671, 511, 718], 'target': 'green'},
+ # {'name': 'soda can', 'box': [397, 524, 489, 605], 'target': 'light_blue'},
+ # {'name': 'brown snack', 'box': [397, 422, 475, 503], 'target': 'black'},
+ # {'name': 'welch snack', 'box': [520, 466, 600, 543], 'target': 'black'},
+ # {'name': 'paper towel', 'box': [179, 564, 250, 607], 'target': 'black'},
+ # {'name': 'plastic cup', 'box': [271, 587, 346, 643], 'target': 'black'},
+ # ]
+ #
+ # # Helper to get center of a normalized box
+ # def get_center(box):
+ # ymin, xmin, ymax, xmax = box
+ # return ((xmin + xmax) / 2000 * width, (ymin + ymax) / 2000 * height)
+ #
+ # draw = PIL.ImageDraw.Draw(img)
+ #
+ # # Define arrow colors based on target bin
+ # colors = {
+ # 'green': 'green',
+ # 'light_blue': 'blue',
+ # 'black': 'red'
+ # }
+ #
+ # for obj in objects:
+ # start_point = get_center(obj['box'])
+ # end_point = get_center(bins[obj['target']])
+ # color = colors[obj['target']]
+ # # Drawing a line with an arrow head (simulated with a few extra lines)
+ # draw.line([start_point, end_point], fill=color, width=5)
+ # # Simple arrowhead
+ # import math
+ # angle = math.atan2(end_point[1] - start_point[1], end_point[0] - start_point[0])
+ # arrow_len = 20
+ # p1 = (end_point[0] - arrow_len * math.cos(angle - math.pi / 6),
+ # end_point[1] - arrow_len * math.sin(angle - math.pi / 6))
+ # p2 = (end_point[0] - arrow_len * math.cos(angle + math.pi / 6),
+ # end_point[1] - arrow_len * math.sin(angle + math.pi / 6))
+ # draw.line([end_point, p1], fill=color, width=5)
+ # draw.line([end_point, p2], fill=color, width=5)
+ #
+ # img.save('annotated_robotic.jpeg')
+ #
+ # # Also list detections for confirmation
+ # # [
+ # # {"box_2d": [118, 308, 338, 436], "label": "light blue bin"},
+ # # {"box_2d": [248, 678, 458, 831], "label": "green bin"},
+ # # {"box_2d": [645, 407, 898, 578], "label": "black bin"},
+ # # {"box_2d": [256, 482, 296, 546], "label": "green pepper"},
+ # # {"box_2d": [317, 478, 349, 544], "label": "red pepper"},
+ # # {"box_2d": [584, 555, 664, 593], "label": "grapes"},
+ # # {"box_2d": [463, 671, 511, 718], "label": "cherries"},
+ # # {"box_2d": [397, 524, 489, 605], "label": "soda can"},
+ # # {"box_2d": [397, 422, 475, 503], "label": "brown snack"},
+ # # {"box_2d": [520, 466, 600, 543], "label": "welch snack"},
+ # # {"box_2d": [179, 564, 250, 607], "label": "paper towel"},
+ # # {"box_2d": [271, 587, 346, 643], "label": "plastic cup"}
+ # # ]
+ #
+ # ####################### 2. Executing Python Code #######################
+ # None
+ # ####################### 3. Save Output #######################
+    # Output is saved to robotic-annotate-output-1.jpg
+ # The image has been annotated with arrows indicating the appropriate bin for each object based on standard waste sorting practices:
+ #
+ # - **Green Arrows (Compost):** Organic items such as the green pepper, red pepper, grapes, and cherries are directed to the **green bin**.
+ # - **Blue Arrow (Recycling):** The crushed soda can is directed to the **light blue bin**.
+ # - **Red Arrows (Trash/Landfill):** Non-recyclable or contaminated items like the snack wrappers (brown and Welch's), the white paper towel, and the small plastic cup are directed to the **black bin**.
+ #
+ # These categorizations follow common sorting rules where green is for organics, blue for recyclables, and black for general waste.
+ # [END googlegenaisdk_codeexecution_annotateimage_with_txt_gcsimg]
+ return True
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/code_execution/codeexecution_barplot_with_txt_img.py b/genai/code_execution/codeexecution_barplot_with_txt_img.py
new file mode 100644
index 00000000000..7542282e4be
--- /dev/null
+++ b/genai/code_execution/codeexecution_barplot_with_txt_img.py
@@ -0,0 +1,156 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> bool:
+ # [START googlegenaisdk_codeexecution_barplot_with_txt_img]
+ import io
+ from PIL import Image
+ from google import genai
+ from google.genai import types
+
+    # Use the benchmark image in Cloud Storage
+ image = types.Part.from_uri(
+ file_uri="https://storage.googleapis.com/cloud-samples-data/generative-ai/image/benchmark.jpeg",
+ mime_type="image/jpeg",
+ )
+
+ client = genai.Client()
+
+ response = client.models.generate_content(
+ model="gemini-3-flash-preview",
+ contents=[
+ image,
+            "Make a bar chart of per-category performance, normalize prior SOTA as 1.0 for each task, "
+ "then take average per-category. Plot using matplotlib with nice style.",
+ ],
+ config=types.GenerateContentConfig(tools=[types.Tool(code_execution=types.ToolCodeExecution)]),
+ )
+
+ img_count = 0
+ for part in response.candidates[0].content.parts:
+ if part.text is not None:
+ print(part.text)
+ if part.executable_code is not None:
+ print("####################### 1. Generate Python Code #######################")
+ print(part.executable_code.code)
+ if part.code_execution_result is not None:
+ print("####################### 2. Executing Python Code #######################")
+ print(part.code_execution_result.output)
+ # For local executions, save the output to a local filename
+ if part.as_image() is not None:
+ print("####################### 3. Save Output #######################")
+ img_count += 1
+ output_location = f"output-barplot-{img_count}.jpg"
+ image_data = part.as_image().image_bytes
+ image = Image.open(io.BytesIO(image_data))
+ image = image.convert("RGB")
+ image.save(output_location)
+ print(f"Output is saved to {output_location}")
+ # Example response:
+ # ####################### 1. Generate Python Code #######################
+ # import matplotlib.pyplot as plt
+ # import numpy as np
+ #
+ # data = [
+ # # Category, Benchmark, G3P, G2.5P, C4.5, GPT5.1, lower_is_better
+ # ("Visual Reasoning", "MMMU Pro", 81.0, 68.0, 72.0, 76.0, False),
+ # ("Visual Reasoning", "VLMsAreBiased", 50.6, 24.3, 32.7, 21.7, False),
+ # ("Document", "CharXiv Reasoning", 81.4, 69.6, 67.2, 69.5, False),
+ # ("Document", "OmniDocBench1.5*", 0.115, 0.145, 0.120, 0.147, True),
+ # ("Spatial", "ERQA", 70.5, 56.0, 51.3, 60.0, False),
+ # ("Spatial", "Point-Bench", 85.5, 62.7, 38.5, 41.8, False),
+ # ("Spatial", "RefSpatial", 65.5, 33.6, 19.5, 28.2, False),
+ # ("Spatial", "CV-Bench", 92.0, 85.9, 83.8, 84.6, False),
+ # ("Spatial", "MindCube", 77.7, 57.5, 58.5, 61.7, False),
+ # ("Screen", "ScreenSpot Pro", 72.7, 11.4, 49.9, 3.50, False),
+ # ("Screen", "Gui-World QA", 68.0, 42.8, 44.9, 38.7, False),
+ # ("Video", "Video-MMMU", 87.6, 83.6, 84.4, 80.4, False),
+ # ("Video", "Video-MME", 88.4, 86.9, 84.1, 86.3, False),
+ # ("Video", "1H-VideoQA", 81.8, 79.4, 52.0, 61.5, False),
+ # ("Video", "Perception Test", 80.0, 78.4, 74.1, 77.8, False),
+ # ("Video", "YouCook2", 222.7, 188.3, 145.8, 132.4, False),
+ # ("Video", "Vatex", 77.4, 71.3, 60.1, 62.9, False),
+ # ("Video", "Motion Bench", 70.3, 66.3, 65.9, 61.1, False),
+ # ("Education", "Math Kangaroo", 84.4, 77.4, 68.9, 79.9, False),
+ # ("Biomedical", "MedXpertQA-MM", 77.8, 65.9, 62.2, 65.5, False),
+ # ("Biomedical", "VQA-RAD", 81.9, 71.4, 76.0, 72.2, False),
+ # ("Biomedical", "MicroVQA", 68.8, 63.5, 61.4, 61.5, False),
+ # ]
+ #
+ # normalized_scores = []
+ # for cat, bench, g3p, g25p, c45, gpt, lib in data:
+ # others = [g25p, c45, gpt]
+ # if lib:
+ # sota = min(others)
+ # norm_score = sota / g3p
+ # else:
+ # sota = max(others)
+ # norm_score = g3p / sota
+ # normalized_scores.append((cat, norm_score))
+ #
+ # categories = {}
+ # for cat, score in normalized_scores:
+ # if cat not in categories:
+ # categories[cat] = []
+ # categories[cat].append(score)
+ #
+ # avg_per_category = {cat: np.mean(scores) for cat, scores in categories.items()}
+ #
+ # # Plotting
+ # cats = list(avg_per_category.keys())
+ # values = [avg_per_category[c] for c in cats]
+ #
+ # # Sort categories for better visualization if needed, or keep order from data
+ # plt.figure(figsize=(10, 6))
+ # plt.style.use('ggplot')
+ # bars = plt.bar(cats, values, color='skyblue', edgecolor='navy')
+ #
+ # plt.axhline(y=1.0, color='red', linestyle='--', label='Prior SOTA (1.0)')
+ # plt.ylabel('Normalized Performance (SOTA = 1.0)')
+ # plt.title('Gemini 3 Pro Performance relative to Prior SOTA (Normalized)', fontsize=14)
+ # plt.xticks(rotation=45, ha='right')
+ # plt.ylim(0, max(values) * 1.2)
+ #
+ # for bar in bars:
+ # yval = bar.get_height()
+ # plt.text(bar.get_x() + bar.get_width()/2, yval + 0.02, f'{yval:.2f}x', ha='center', va='bottom')
+ #
+ # plt.legend()
+ # plt.tight_layout()
+ # plt.savefig('performance_chart.png')
+ # plt.show()
+ #
+ # print(avg_per_category)
+ #
+ # ####################### 2. Executing Python Code #######################
+ # {'Visual Reasoning': np.float64(1.3065950426525028), 'Document': np.float64(1.1065092453773113), 'Spatial': np.float64(1.3636746436001959), 'Screen': np.float64(1.4856952211773211), 'Video': np.float64(1.0620548283943443), 'Education': np.float64(1.0563204005006257), 'Biomedical': np.float64(1.1138909257119955)}
+ #
+ # ####################### 3. Save Output #######################
+ # Output is saved to output-barplot-1.jpg
+ # ####################### 3. Save Output #######################
+ # Output is saved to output-barplot-2.jpg
+ # Based on the data provided in the table, I have calculated the per-category performance of Gemini 3 Pro normalized against the prior state-of-the-art (SOTA), which is defined as the best performance among Gemini 2.5 Pro, Claude Opus 4.5, and GPT-5.1 for each benchmark.
+ #
+ # For benchmarks where lower values are better (indicated by an asterisk, e.g., OmniDocBench1.5*), the normalization was calculated as $\text{Prior SOTA} / \text{Gemini 3 Pro Score}$. For all other benchmarks, it was calculated as $\text{Gemini 3 Pro Score} / \text{Prior SOTA}$. The values were then averaged within each category.
+ #
+ # The resulting bar chart below shows that Gemini 3 Pro outperforms the prior SOTA across all categories, with the most significant gains in **Screen** (1.49x), **Spatial** (1.36x), and **Visual Reasoning** (1.31x) benchmarks.
+ #
+ # 
+ # [END googlegenaisdk_codeexecution_barplot_with_txt_img]
+ return True
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/code_execution/codeexecution_cropimage_with_txt_img.py b/genai/code_execution/codeexecution_cropimage_with_txt_img.py
new file mode 100644
index 00000000000..9acfae1f93f
--- /dev/null
+++ b/genai/code_execution/codeexecution_cropimage_with_txt_img.py
@@ -0,0 +1,95 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> bool:
+ # [START googlegenaisdk_codeexecution_cropimage_with_txt_img]
+ import io
+ import requests
+ from PIL import Image
+ from google import genai
+ from google.genai import types
+
+ # Download the input image
+ image_path = "https://storage.googleapis.com/cloud-samples-data/generative-ai/image/chips.jpeg"
+ image_bytes = requests.get(image_path).content
+ image = types.Part.from_bytes(data=image_bytes, mime_type="image/jpeg")
+
+ client = genai.Client()
+
+ response = client.models.generate_content(
+ model="gemini-3-flash-preview",
+ contents=[
+ image,
+ "Locate the ESMT chip. What are the numbers on the chip?",
+ ],
+ config=types.GenerateContentConfig(tools=[types.Tool(code_execution=types.ToolCodeExecution)]),
+ )
+
+ for part in response.candidates[0].content.parts:
+ if part.text is not None:
+ print(part.text)
+ if part.executable_code is not None:
+ print("####################### 1. Generate Python Code #######################")
+ print(part.executable_code.code)
+ if part.code_execution_result is not None:
+ print("####################### 2. Executing Python Code #######################")
+ print(part.code_execution_result.output)
+ # For local executions, save the output to a local filename
+ if part.as_image() is not None:
+ print("####################### 3. Save Output #######################")
+ image_data = part.as_image().image_bytes
+ image = Image.open(io.BytesIO(image_data))
+ output_location = "ESMT-chip-output.jpg"
+ image.save(output_location)
+ print(f"Output is saved to {output_location}")
+ # Example response:
+ # ####################### 1. Generate Python Code #######################
+ # import PIL.Image
+ # import PIL.ImageDraw
+ #
+ # # Load the image to get dimensions
+ # img = PIL.Image.open('input_file_0.jpeg')
+ # width, height = img.size
+ #
+    # # Define the region for the ESMT chip
+    # # It is roughly in the center
+    # # Normalized coordinates roughly: [ymin, xmin, ymax, xmax]
+    # esmt_chip_box = [460, 465, 615, 615]
+    #
+    # # Convert normalized to pixel coordinates
+    # def norm_to_pixel(norm_box, w, h):
+    #     ymin, xmin, ymax, xmax = norm_box
+    #     return [int(ymin * h / 1000), int(xmin * w / 1000), int(ymax * h / 1000), int(xmax * w / 1000)]
+    #
+    # chip_pixel_box = norm_to_pixel(esmt_chip_box, width, height)
+    #
+    # # Crop and save
+    # chip_crop = img.crop((chip_pixel_box[1], chip_pixel_box[0], chip_pixel_box[3], chip_pixel_box[2]))
+    # chip_crop.save('esmt_chip_zoom.png')
+    #
+    # # Output objects for verification (optional but helpful for internal tracking)
+    # # [{box_2d: [460, 465, 615, 615], label: "ESMT chip"}]
+ #
+ # ####################### 2. Executing Python Code #######################
+ # None
+ # ####################### 3. Save Output #######################
+    # Output is saved to ESMT-chip-output.jpg
+    # Based on the zoomed-in crop of the ESMT chip, the numbers printed on the chip are legible in the saved output image.
+ # [END googlegenaisdk_codeexecution_cropimage_with_txt_img]
+ return True
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/code_execution/noxfile_config.py b/genai/code_execution/noxfile_config.py
new file mode 100644
index 00000000000..29d9e7911eb
--- /dev/null
+++ b/genai/code_execution/noxfile_config.py
@@ -0,0 +1,42 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, then it will be imported from
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": ["2.7", "3.7", "3.8", "3.9", "3.10", "3.11", "3.13", "3.14"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ "enforce_type_hints": True,
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
diff --git a/genai/code_execution/requirements-test.txt b/genai/code_execution/requirements-test.txt
new file mode 100644
index 00000000000..8d10ef87035
--- /dev/null
+++ b/genai/code_execution/requirements-test.txt
@@ -0,0 +1,4 @@
+backoff==2.2.1
+google-api-core==2.29.0
+pytest==9.0.2
+pytest-asyncio==1.3.0
diff --git a/genai/code_execution/requirements.txt b/genai/code_execution/requirements.txt
new file mode 100644
index 00000000000..7365e0b937d
--- /dev/null
+++ b/genai/code_execution/requirements.txt
@@ -0,0 +1,2 @@
+google-genai==1.60.0
+pillow==11.1.0
diff --git a/genai/code_execution/test_codeexecution.py b/genai/code_execution/test_codeexecution.py
new file mode 100644
index 00000000000..e3a8bfb7944
--- /dev/null
+++ b/genai/code_execution/test_codeexecution.py
@@ -0,0 +1,35 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+
+import codeexecution_annotateimage_with_txt_gcsimg
+import codeexecution_barplot_with_txt_img
+import codeexecution_cropimage_with_txt_img
+
+os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global" # "us-central1"
+# The project name is included in the CICD pipeline
+# os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"
+
+
+def test_codeexecution_annotateimage_with_txt_gcsimg() -> None:
+ assert codeexecution_annotateimage_with_txt_gcsimg.generate_content()
+
+
+def test_codeexecution_barplot_with_txt_img() -> None:
+ assert codeexecution_barplot_with_txt_img.generate_content()
+
+
+def test_codeexecution_cropimage_with_txt_img() -> None:
+ assert codeexecution_cropimage_with_txt_img.generate_content()
diff --git a/genai/content_cache/contentcache_create_with_txt_gcs_pdf.py b/genai/content_cache/contentcache_create_with_txt_gcs_pdf.py
index 5916d523dae..2ed5ee6b713 100644
--- a/genai/content_cache/contentcache_create_with_txt_gcs_pdf.py
+++ b/genai/content_cache/contentcache_create_with_txt_gcs_pdf.py
@@ -42,10 +42,12 @@ def create_content_cache() -> str:
]
content_cache = client.caches.create(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
config=CreateCachedContentConfig(
contents=contents,
system_instruction=system_instruction,
+ # (Optional) For enhanced security, the content cache can be encrypted using a Cloud KMS key
+ # kms_key_name = "projects/.../locations/.../keyRings/.../cryptoKeys/..."
display_name="example-cache",
ttl="86400s",
),
@@ -54,7 +56,7 @@ def create_content_cache() -> str:
print(content_cache.name)
print(content_cache.usage_metadata)
# Example response:
- # projects/111111111111/locations/us-central1/cachedContents/1111111111111111111
+ # projects/111111111111/locations/.../cachedContents/1111111111111111111
# CachedContentUsageMetadata(audio_duration_seconds=None, image_count=167,
# text_count=153, total_token_count=43130, video_duration_seconds=None)
# [END googlegenaisdk_contentcache_create_with_txt_gcs_pdf]
diff --git a/genai/content_cache/contentcache_delete.py b/genai/content_cache/contentcache_delete.py
index 3761b84ea6a..9afe8962a5a 100644
--- a/genai/content_cache/contentcache_delete.py
+++ b/genai/content_cache/contentcache_delete.py
@@ -19,11 +19,11 @@ def delete_context_caches(cache_name: str) -> str:
client = genai.Client()
# Delete content cache using name
- # E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
+ # E.g cache_name = 'projects/111111111111/locations/.../cachedContents/1111111111111111111'
client.caches.delete(name=cache_name)
print("Deleted Cache", cache_name)
# Example response
- # Deleted Cache projects/111111111111/locations/us-central1/cachedContents/1111111111111111111
+ # Deleted Cache projects/111111111111/locations/.../cachedContents/1111111111111111111
# [END googlegenaisdk_contentcache_delete]
return cache_name
diff --git a/genai/content_cache/contentcache_list.py b/genai/content_cache/contentcache_list.py
index f477da31b29..9f0f2a6b510 100644
--- a/genai/content_cache/contentcache_list.py
+++ b/genai/content_cache/contentcache_list.py
@@ -29,8 +29,8 @@ def list_context_caches() -> str:
print(f"Expires at: {content_cache.expire_time}")
# Example response:
- # * Cache `projects/111111111111/locations/us-central1/cachedContents/1111111111111111111` for
- # model `projects/111111111111/locations/us-central1/publishers/google/models/gemini-XXX-pro-XXX`
+ # * Cache `projects/111111111111/locations/.../cachedContents/1111111111111111111` for
+ # model `projects/111111111111/locations/.../publishers/google/models/gemini-XXX-pro-XXX`
# * Last updated at: 2025-02-13 14:46:42.620490+00:00
# * CachedContentUsageMetadata(audio_duration_seconds=None, image_count=167, text_count=153, total_token_count=43130, video_duration_seconds=None)
# ...
diff --git a/genai/content_cache/contentcache_update.py b/genai/content_cache/contentcache_update.py
index 1f1136359be..27f96743385 100644
--- a/genai/content_cache/contentcache_update.py
+++ b/genai/content_cache/contentcache_update.py
@@ -25,7 +25,7 @@ def update_content_cache(cache_name: str) -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
# Get content cache by name
- # cache_name = "projects/111111111111/locations/us-central1/cachedContents/1111111111111111111"
+ # cache_name = "projects/.../locations/.../cachedContents/1111111111111111111"
content_cache = client.caches.get(name=cache_name)
print("Expire time", content_cache.expire_time)
# Example response
diff --git a/genai/content_cache/contentcache_use_with_txt.py b/genai/content_cache/contentcache_use_with_txt.py
index d480e1da160..7e85e52cd72 100644
--- a/genai/content_cache/contentcache_use_with_txt.py
+++ b/genai/content_cache/contentcache_use_with_txt.py
@@ -20,9 +20,9 @@ def generate_content(cache_name: str) -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
# Use content cache to generate text response
- # E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
+ # E.g cache_name = 'projects/.../locations/.../cachedContents/1111111111111111111'
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Summarize the pdfs",
config=GenerateContentConfig(
cached_content=cache_name,
diff --git a/genai/content_cache/requirements.txt b/genai/content_cache/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/content_cache/requirements.txt
+++ b/genai/content_cache/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/controlled_generation/ctrlgen_with_class_schema.py b/genai/controlled_generation/ctrlgen_with_class_schema.py
index c2c985bb916..8613c206a59 100644
--- a/genai/controlled_generation/ctrlgen_with_class_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_class_schema.py
@@ -26,7 +26,7 @@ class Recipe(BaseModel):
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="List a few popular cookie recipes.",
config=GenerateContentConfig(
response_mime_type="application/json",
diff --git a/genai/controlled_generation/ctrlgen_with_enum_class_schema.py b/genai/controlled_generation/ctrlgen_with_enum_class_schema.py
index 03064ef6e9e..0eeb869c200 100644
--- a/genai/controlled_generation/ctrlgen_with_enum_class_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_enum_class_schema.py
@@ -29,7 +29,7 @@ class InstrumentClass(enum.Enum):
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="What type of instrument is a guitar?",
config={
"response_mime_type": "text/x.enum",
diff --git a/genai/controlled_generation/ctrlgen_with_enum_schema.py b/genai/controlled_generation/ctrlgen_with_enum_schema.py
index def5ac1b782..3cfd358ac25 100644
--- a/genai/controlled_generation/ctrlgen_with_enum_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_enum_schema.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="What type of instrument is an oboe?",
config=GenerateContentConfig(
response_mime_type="text/x.enum",
diff --git a/genai/controlled_generation/ctrlgen_with_nested_class_schema.py b/genai/controlled_generation/ctrlgen_with_nested_class_schema.py
index eec13934004..633c79bb128 100644
--- a/genai/controlled_generation/ctrlgen_with_nested_class_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_nested_class_schema.py
@@ -36,7 +36,7 @@ class Recipe(BaseModel):
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="List about 10 home-baked cookies and give them grades based on tastiness.",
config=GenerateContentConfig(
response_mime_type="application/json",
diff --git a/genai/controlled_generation/ctrlgen_with_nullable_schema.py b/genai/controlled_generation/ctrlgen_with_nullable_schema.py
index 518ef31958e..8aba542425e 100644
--- a/genai/controlled_generation/ctrlgen_with_nullable_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_nullable_schema.py
@@ -51,7 +51,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=prompt,
config=GenerateContentConfig(
response_mime_type="application/json",
diff --git a/genai/controlled_generation/ctrlgen_with_resp_schema.py b/genai/controlled_generation/ctrlgen_with_resp_schema.py
index 367837aa2c7..2e17c516d0f 100644
--- a/genai/controlled_generation/ctrlgen_with_resp_schema.py
+++ b/genai/controlled_generation/ctrlgen_with_resp_schema.py
@@ -36,7 +36,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=prompt,
config={
"response_mime_type": "application/json",
diff --git a/genai/controlled_generation/requirements.txt b/genai/controlled_generation/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/controlled_generation/requirements.txt
+++ b/genai/controlled_generation/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/count_tokens/counttoken_compute_with_txt.py b/genai/count_tokens/counttoken_compute_with_txt.py
index ccc5f346a6c..0b3af0a6bb2 100644
--- a/genai/count_tokens/counttoken_compute_with_txt.py
+++ b/genai/count_tokens/counttoken_compute_with_txt.py
@@ -20,7 +20,7 @@ def compute_tokens_example() -> int:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.compute_tokens(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="What's the longest word in the English language?",
)
diff --git a/genai/count_tokens/counttoken_localtokenizer_compute_with_txt.py b/genai/count_tokens/counttoken_localtokenizer_compute_with_txt.py
new file mode 100644
index 00000000000..889044e63af
--- /dev/null
+++ b/genai/count_tokens/counttoken_localtokenizer_compute_with_txt.py
@@ -0,0 +1,36 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def counttoken_localtokenizer_compute_with_txt() -> list:
+ # [START googlegenaisdk_counttoken_localtokenizer_compute_with_txt]
+ from google.genai.local_tokenizer import LocalTokenizer
+
+ tokenizer = LocalTokenizer(model_name="gemini-2.5-flash")
+ response = tokenizer.compute_tokens("What's the longest word in the English language?")
+ print(response)
+ # Example output:
+ # tokens_info=[TokensInfo(
+ # role='user',
+ # token_ids=[3689, 236789, 236751, 506,
+ # 27801, 3658, 528, 506, 5422, 5192, 236881],
+ # tokens=[b'What', b"'", b's', b' the', b' longest',
+ # b' word', b' in', b' the', b' English', b' language', b'?']
+ # )]
+ # [END googlegenaisdk_counttoken_localtokenizer_compute_with_txt]
+ return response.tokens_info
+
+
+if __name__ == "__main__":
+ counttoken_localtokenizer_compute_with_txt()
diff --git a/genai/count_tokens/counttoken_localtokenizer_with_txt.py b/genai/count_tokens/counttoken_localtokenizer_with_txt.py
new file mode 100644
index 00000000000..e784d393c9b
--- /dev/null
+++ b/genai/count_tokens/counttoken_localtokenizer_with_txt.py
@@ -0,0 +1,30 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def counttoken_localtokenizer_with_txt() -> int:
+ # [START googlegenaisdk_counttoken_localtokenizer_with_txt]
+ from google.genai.local_tokenizer import LocalTokenizer
+
+ tokenizer = LocalTokenizer(model_name="gemini-2.5-flash")
+ response = tokenizer.count_tokens("What's the highest mountain in Africa?")
+ print(response)
+ # Example output:
+ # total_tokens=10
+ # [END googlegenaisdk_counttoken_localtokenizer_with_txt]
+ return response.total_tokens
+
+
+if __name__ == "__main__":
+ counttoken_localtokenizer_with_txt()
diff --git a/genai/count_tokens/counttoken_resp_with_txt.py b/genai/count_tokens/counttoken_resp_with_txt.py
index c484b7da633..f2db5309e01 100644
--- a/genai/count_tokens/counttoken_resp_with_txt.py
+++ b/genai/count_tokens/counttoken_resp_with_txt.py
@@ -24,7 +24,7 @@ def count_tokens_example() -> int:
# Send text to Gemini
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20", contents=prompt
+ model="gemini-2.5-flash", contents=prompt
)
# Prompt and response tokens count
diff --git a/genai/count_tokens/counttoken_with_txt.py b/genai/count_tokens/counttoken_with_txt.py
index 96ce7b2f05b..fcbf9484087 100644
--- a/genai/count_tokens/counttoken_with_txt.py
+++ b/genai/count_tokens/counttoken_with_txt.py
@@ -20,12 +20,12 @@ def count_tokens() -> int:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.count_tokens(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="What's the highest mountain in Africa?",
)
print(response)
# Example output:
- # total_tokens=10
+ # total_tokens=9
# cached_content_token_count=None
# [END googlegenaisdk_counttoken_with_txt]
return response.total_tokens
diff --git a/genai/count_tokens/counttoken_with_txt_vid.py b/genai/count_tokens/counttoken_with_txt_vid.py
index 7ee3f4367ec..e32f14f0845 100644
--- a/genai/count_tokens/counttoken_with_txt_vid.py
+++ b/genai/count_tokens/counttoken_with_txt_vid.py
@@ -29,7 +29,7 @@ def count_tokens() -> int:
]
response = client.models.count_tokens(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=contents,
)
print(response)
diff --git a/genai/count_tokens/requirements.txt b/genai/count_tokens/requirements.txt
index 7890f90e26a..726dd09178a 100644
--- a/genai/count_tokens/requirements.txt
+++ b/genai/count_tokens/requirements.txt
@@ -1 +1,2 @@
-google-genai==1.16.1
+google-genai==1.42.0
+sentencepiece==0.2.1
diff --git a/genai/count_tokens/test_count_tokens_examples.py b/genai/count_tokens/test_count_tokens_examples.py
index b654ff872d8..e83f20cd14c 100644
--- a/genai/count_tokens/test_count_tokens_examples.py
+++ b/genai/count_tokens/test_count_tokens_examples.py
@@ -19,6 +19,8 @@
import os
import counttoken_compute_with_txt
+import counttoken_localtokenizer_compute_with_txt
+import counttoken_localtokenizer_with_txt
import counttoken_resp_with_txt
import counttoken_with_txt
import counttoken_with_txt_vid
@@ -43,3 +45,11 @@ def test_counttoken_with_txt() -> None:
def test_counttoken_with_txt_vid() -> None:
assert counttoken_with_txt_vid.count_tokens()
+
+
+def test_counttoken_localtokenizer_with_txt() -> None:
+ assert counttoken_localtokenizer_with_txt.counttoken_localtokenizer_with_txt()
+
+
+def test_counttoken_localtokenizer_compute_with_txt() -> None:
+ assert counttoken_localtokenizer_compute_with_txt.counttoken_localtokenizer_compute_with_txt()
diff --git a/genai/embeddings/embeddings_docretrieval_with_txt.py b/genai/embeddings/embeddings_docretrieval_with_txt.py
index 06c9e84e982..e9352279859 100644
--- a/genai/embeddings/embeddings_docretrieval_with_txt.py
+++ b/genai/embeddings/embeddings_docretrieval_with_txt.py
@@ -21,7 +21,11 @@ def embed_content() -> str:
client = genai.Client()
response = client.models.embed_content(
model="gemini-embedding-001",
- contents="How do I get a driver's license/learner's permit?",
+ contents=[
+ "How do I get a driver's license/learner's permit?",
+ "How long is my driver's license valid for?",
+ "Driver's knowledge test study guide",
+ ],
config=EmbedContentConfig(
task_type="RETRIEVAL_DOCUMENT", # Optional
output_dimensionality=3072, # Optional
diff --git a/genai/embeddings/requirements.txt b/genai/embeddings/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/embeddings/requirements.txt
+++ b/genai/embeddings/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/express_mode/api_key_example.py b/genai/express_mode/api_key_example.py
index f667eb5846e..21f8ab0e81d 100644
--- a/genai/express_mode/api_key_example.py
+++ b/genai/express_mode/api_key_example.py
@@ -23,7 +23,7 @@ def generate_content() -> str:
client = genai.Client(vertexai=True, api_key=API_KEY)
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Explain bubble sort to me.",
)
diff --git a/genai/express_mode/requirements.txt b/genai/express_mode/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/express_mode/requirements.txt
+++ b/genai/express_mode/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/express_mode/test_express_mode_examples.py b/genai/express_mode/test_express_mode_examples.py
index b7f14a4a9e4..7b2ff26511a 100644
--- a/genai/express_mode/test_express_mode_examples.py
+++ b/genai/express_mode/test_express_mode_examples.py
@@ -40,7 +40,7 @@ def test_api_key_example(mock_genai_client: MagicMock) -> None:
mock_genai_client.assert_called_once_with(vertexai=True, api_key="YOUR_API_KEY")
mock_genai_client.return_value.models.generate_content.assert_called_once_with(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Explain bubble sort to me.",
)
assert response == "This is a mocked bubble sort explanation."
diff --git a/genai/image_generation/imggen_canny_ctrl_type_with_txt_img.py b/genai/image_generation/imggen_canny_ctrl_type_with_txt_img.py
index 2c093ade953..2c01a1e661e 100644
--- a/genai/image_generation/imggen_canny_ctrl_type_with_txt_img.py
+++ b/genai/image_generation/imggen_canny_ctrl_type_with_txt_img.py
@@ -16,7 +16,12 @@
def canny_edge_customization(output_gcs_uri: str) -> str:
# [START googlegenaisdk_imggen_canny_ctrl_type_with_txt_img]
from google import genai
- from google.genai.types import ControlReferenceConfig, ControlReferenceImage, EditImageConfig, Image
+ from google.genai.types import (
+ ControlReferenceConfig,
+ ControlReferenceImage,
+ EditImageConfig,
+ Image,
+ )
client = genai.Client()
@@ -38,7 +43,6 @@ def canny_edge_customization(output_gcs_uri: str) -> str:
config=EditImageConfig(
edit_mode="EDIT_MODE_CONTROLLED_EDITING",
number_of_images=1,
- seed=1,
safety_filter_level="BLOCK_MEDIUM_AND_ABOVE",
person_generation="ALLOW_ADULT",
output_gcs_uri=output_gcs_uri,
diff --git a/genai/image_generation/imggen_inpainting_insert_mask_with_txt_img.py b/genai/image_generation/imggen_inpainting_insert_mask_with_txt_img.py
new file mode 100644
index 00000000000..69cdbed2eef
--- /dev/null
+++ b/genai/image_generation/imggen_inpainting_insert_mask_with_txt_img.py
@@ -0,0 +1,66 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai.types import Image
+
+
+def edit_inpainting_insert_mask(output_file: str) -> Image:
+ # [START googlegenaisdk_imggen_inpainting_insert_mask_with_txt_img]
+ from google import genai
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_file = "output-image.png"
+
+ raw_ref = RawReferenceImage(
+ reference_image=Image.from_file(location="test_resources/fruit.png"),
+ reference_id=0,
+ )
+ mask_ref = MaskReferenceImage(
+ reference_id=1,
+ reference_image=Image.from_file(location="test_resources/fruit_mask.png"),
+ config=MaskReferenceConfig(
+ mask_mode="MASK_MODE_USER_PROVIDED",
+ mask_dilation=0.01,
+ ),
+ )
+
+ image = client.models.edit_image(
+ model="imagen-3.0-capability-001",
+ prompt="A plate of cookies",
+ reference_images=[raw_ref, mask_ref],
+ config=EditImageConfig(
+ edit_mode="EDIT_MODE_INPAINT_INSERTION",
+ ),
+ )
+
+ image.generated_images[0].image.save(output_file)
+
+ print(f"Created output image using {len(image.generated_images[0].image.image_bytes)} bytes")
+ # Example response:
+ # Created output image using 1234567 bytes
+
+ # [END googlegenaisdk_imggen_inpainting_insert_mask_with_txt_img]
+ return image.generated_images[0].image
+
+
+if __name__ == "__main__":
+ edit_inpainting_insert_mask(output_file="output_folder/fruit_edit.png")
diff --git a/genai/image_generation/imggen_inpainting_insert_with_txt_img.py b/genai/image_generation/imggen_inpainting_insert_with_txt_img.py
index b898a0b7d5c..484864cab12 100644
--- a/genai/image_generation/imggen_inpainting_insert_with_txt_img.py
+++ b/genai/image_generation/imggen_inpainting_insert_with_txt_img.py
@@ -18,7 +18,12 @@
def edit_inpainting_insert(output_file: str) -> Image:
# [START googlegenaisdk_imggen_inpainting_insert_with_txt_img]
from google import genai
- from google.genai.types import RawReferenceImage, MaskReferenceImage, MaskReferenceConfig, EditImageConfig
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
client = genai.Client()
@@ -26,7 +31,9 @@ def edit_inpainting_insert(output_file: str) -> Image:
# output_file = "output-image.png"
raw_ref = RawReferenceImage(
- reference_image=Image.from_file(location='test_resources/fruit.png'), reference_id=0)
+ reference_image=Image.from_file(location="test_resources/fruit.png"),
+ reference_id=0,
+ )
mask_ref = MaskReferenceImage(
reference_id=1,
reference_image=None,
@@ -56,4 +63,4 @@ def edit_inpainting_insert(output_file: str) -> Image:
if __name__ == "__main__":
- edit_inpainting_insert(output_file="test_resources/fruit_edit.png")
+ edit_inpainting_insert(output_file="output_folder/fruit_edit.png")
diff --git a/genai/image_generation/imggen_inpainting_removal_mask_with_txt_img.py b/genai/image_generation/imggen_inpainting_removal_mask_with_txt_img.py
new file mode 100644
index 00000000000..144155664d4
--- /dev/null
+++ b/genai/image_generation/imggen_inpainting_removal_mask_with_txt_img.py
@@ -0,0 +1,66 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai.types import Image
+
+
+def edit_inpainting_removal_mask(output_file: str) -> Image:
+ # [START googlegenaisdk_imggen_inpainting_removal_mask_with_txt_img]
+ from google import genai
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_file = "output-image.png"
+
+ raw_ref = RawReferenceImage(
+ reference_image=Image.from_file(location="test_resources/fruit.png"),
+ reference_id=0,
+ )
+ mask_ref = MaskReferenceImage(
+ reference_id=1,
+ reference_image=Image.from_file(location="test_resources/fruit_mask.png"),
+ config=MaskReferenceConfig(
+ mask_mode="MASK_MODE_USER_PROVIDED",
+ mask_dilation=0.01,
+ ),
+ )
+
+ image = client.models.edit_image(
+ model="imagen-3.0-capability-001",
+ prompt="",
+ reference_images=[raw_ref, mask_ref],
+ config=EditImageConfig(
+ edit_mode="EDIT_MODE_INPAINT_REMOVAL",
+ ),
+ )
+
+ image.generated_images[0].image.save(output_file)
+
+ print(f"Created output image using {len(image.generated_images[0].image.image_bytes)} bytes")
+ # Example response:
+ # Created output image using 1234567 bytes
+
+ # [END googlegenaisdk_imggen_inpainting_removal_mask_with_txt_img]
+ return image.generated_images[0].image
+
+
+if __name__ == "__main__":
+ edit_inpainting_removal_mask(output_file="output_folder/fruit_edit.png")
diff --git a/genai/image_generation/imggen_inpainting_removal_with_txt_img.py b/genai/image_generation/imggen_inpainting_removal_with_txt_img.py
index 16cb15494da..4784bccb299 100644
--- a/genai/image_generation/imggen_inpainting_removal_with_txt_img.py
+++ b/genai/image_generation/imggen_inpainting_removal_with_txt_img.py
@@ -18,7 +18,12 @@
def edit_inpainting_removal(output_file: str) -> Image:
# [START googlegenaisdk_imggen_inpainting_removal_with_txt_img]
from google import genai
- from google.genai.types import RawReferenceImage, MaskReferenceImage, MaskReferenceConfig, EditImageConfig
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
client = genai.Client()
@@ -26,7 +31,9 @@ def edit_inpainting_removal(output_file: str) -> Image:
# output_file = "output-image.png"
raw_ref = RawReferenceImage(
- reference_image=Image.from_file(location='test_resources/fruit.png'), reference_id=0)
+ reference_image=Image.from_file(location="test_resources/fruit.png"),
+ reference_id=0,
+ )
mask_ref = MaskReferenceImage(
reference_id=1,
reference_image=None,
@@ -55,4 +62,4 @@ def edit_inpainting_removal(output_file: str) -> Image:
if __name__ == "__main__":
- edit_inpainting_removal(output_file="test_resources/fruit_edit.png")
+ edit_inpainting_removal(output_file="output_folder/fruit_edit.png")
diff --git a/genai/image_generation/imggen_mask_free_edit_with_txt_img.py b/genai/image_generation/imggen_mask_free_edit_with_txt_img.py
index 0637c59ccf3..ed7691a834e 100644
--- a/genai/image_generation/imggen_mask_free_edit_with_txt_img.py
+++ b/genai/image_generation/imggen_mask_free_edit_with_txt_img.py
@@ -26,7 +26,9 @@ def edit_mask_free(output_file: str) -> Image:
# output_file = "output-image.png"
raw_ref = RawReferenceImage(
- reference_image=Image.from_file(location='test_resources/latte.jpg'), reference_id=0)
+ reference_image=Image.from_file(location="test_resources/latte.jpg"),
+ reference_id=0,
+ )
image = client.models.edit_image(
model="imagen-3.0-capability-001",
@@ -48,4 +50,4 @@ def edit_mask_free(output_file: str) -> Image:
if __name__ == "__main__":
- edit_mask_free(output_file="test_resources/latte_edit.png")
+ edit_mask_free(output_file="output_folder/latte_edit.png")
diff --git a/genai/image_generation/imggen_mmflash_edit_img_with_txt_img.py b/genai/image_generation/imggen_mmflash_edit_img_with_txt_img.py
index b446933baef..e2d9888a027 100644
--- a/genai/image_generation/imggen_mmflash_edit_img_with_txt_img.py
+++ b/genai/image_generation/imggen_mmflash_edit_img_with_txt_img.py
@@ -23,10 +23,10 @@ def generate_content() -> str:
client = genai.Client()
# Using an image of Eiffel tower, with fireworks in the background.
- image = Image.open("example-image.png")
+ image = Image.open("test_resources/example-image-eiffel-tower.png")
response = client.models.generate_content(
- model="gemini-2.0-flash-exp",
+ model="gemini-3-pro-image-preview",
contents=[image, "Edit this image to make it look like a cartoon."],
config=GenerateContentConfig(response_modalities=[Modality.TEXT, Modality.IMAGE]),
)
@@ -35,15 +35,10 @@ def generate_content() -> str:
print(part.text)
elif part.inline_data:
image = Image.open(BytesIO((part.inline_data.data)))
- image.save("bw-example-image.png")
- # Example response:
- # Here's the cartoon-style edit of the image:
- # Cartoon-style edit:
- # - Simplified the Eiffel Tower with bolder lines and slightly exaggerated proportions.
- # - Brightened and saturated the colors of the sky, fireworks, and foliage for a more vibrant, cartoonish look.
- # ....
+ image.save("output_folder/bw-example-image.png")
+
# [END googlegenaisdk_imggen_mmflash_edit_img_with_txt_img]
- return "bw-example-image.png"
+ return "output_folder/bw-example-image.png"
if __name__ == "__main__":
diff --git a/genai/image_generation/imggen_mmflash_locale_aware_with_txt.py b/genai/image_generation/imggen_mmflash_locale_aware_with_txt.py
new file mode 100644
index 00000000000..305be883d22
--- /dev/null
+++ b/genai/image_generation/imggen_mmflash_locale_aware_with_txt.py
@@ -0,0 +1,45 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_imggen_mmflash_locale_aware_with_txt]
+ from google import genai
+ from google.genai.types import GenerateContentConfig, Modality
+ from PIL import Image
+ from io import BytesIO
+
+ client = genai.Client()
+
+ response = client.models.generate_content(
+ model="gemini-2.5-flash-image",
+ contents=("Generate a photo of a breakfast meal."),
+ config=GenerateContentConfig(response_modalities=[Modality.TEXT, Modality.IMAGE]),
+ )
+ for part in response.candidates[0].content.parts:
+ if part.text:
+ print(part.text)
+ elif part.inline_data:
+ image = Image.open(BytesIO((part.inline_data.data)))
+ image.save("output_folder/example-breakfast-meal.png")
+ # Example response:
+ # Generates a photo of a vibrant and appetizing breakfast meal.
+ # The scene will feature a white plate with golden-brown pancakes
+ # stacked neatly, drizzled with rich maple syrup and ...
+ # [END googlegenaisdk_imggen_mmflash_locale_aware_with_txt]
+ return "output_folder/example-breakfast-meal.png"
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/image_generation/imggen_mmflash_multiple_imgs_with_txt.py b/genai/image_generation/imggen_mmflash_multiple_imgs_with_txt.py
new file mode 100644
index 00000000000..2b831ca97d9
--- /dev/null
+++ b/genai/image_generation/imggen_mmflash_multiple_imgs_with_txt.py
@@ -0,0 +1,58 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_imggen_mmflash_multiple_imgs_with_txt]
+ from google import genai
+ from google.genai.types import GenerateContentConfig, Modality
+ from PIL import Image
+ from io import BytesIO
+
+ client = genai.Client()
+
+ response = client.models.generate_content(
+ model="gemini-2.5-flash-image",
+ contents=("Generate 3 images a cat sitting on a chair."),
+ config=GenerateContentConfig(response_modalities=[Modality.TEXT, Modality.IMAGE]),
+ )
+ saved_files = []
+ image_counter = 1
+ for part in response.candidates[0].content.parts:
+ if part.text:
+ print(part.text)
+ elif part.inline_data:
+ image = Image.open(BytesIO((part.inline_data.data)))
+ filename = f"output_folder/example-cats-0{image_counter}.png"
+ image.save(filename)
+ saved_files.append(filename)
+ image_counter += 1
+ # Example response:
+ # Image 1: A fluffy calico cat with striking green eyes is perched elegantly on a vintage wooden
+ # chair with a woven seat. Sunlight streams through a nearby window, casting soft shadows and
+ # highlighting the cat's fur.
+ #
+ # Image 2: A sleek black cat with intense yellow eyes is sitting upright on a modern, minimalist
+ # white chair. The background is a plain grey wall, putting the focus entirely on the feline's
+ # graceful posture.
+ #
+ # Image 3: A ginger tabby cat with playful amber eyes is comfortably curled up asleep on a plush,
+ # oversized armchair upholstered in a soft, floral fabric. A corner of a cozy living room with a
+ # warm lamp in the background can be seen.
+ # [END googlegenaisdk_imggen_mmflash_multiple_imgs_with_txt]
+ return saved_files
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/image_generation/imggen_mmflash_txt_and_img_with_txt.py b/genai/image_generation/imggen_mmflash_txt_and_img_with_txt.py
index ac2f2e30de6..7a9d11103a7 100644
--- a/genai/image_generation/imggen_mmflash_txt_and_img_with_txt.py
+++ b/genai/image_generation/imggen_mmflash_txt_and_img_with_txt.py
@@ -23,26 +23,24 @@ def generate_content() -> int:
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.0-flash-exp",
+ model="gemini-3-pro-image-preview",
contents=(
"Generate an illustrated recipe for a paella."
"Create images to go alongside the text as you generate the recipe"
),
config=GenerateContentConfig(response_modalities=[Modality.TEXT, Modality.IMAGE]),
)
- with open("paella-recipe.md", "w") as fp:
+ with open("output_folder/paella-recipe.md", "w") as fp:
for i, part in enumerate(response.candidates[0].content.parts):
if part.text is not None:
fp.write(part.text)
elif part.inline_data is not None:
image = Image.open(BytesIO((part.inline_data.data)))
- image.save(f"example-image-{i+1}.png")
- fp.write(f"")
- # Example response:
- # A markdown page for a Paella recipe(`paella-recipe.md`) has been generated.
- # It includes detailed steps and several images illustrating the cooking process.
+ image.save(f"output_folder/example-image-{i+1}.png")
+ fp.write(f"")
+
# [END googlegenaisdk_imggen_mmflash_txt_and_img_with_txt]
- return i
+ return True
if __name__ == "__main__":
diff --git a/genai/image_generation/imggen_mmflash_with_txt.py b/genai/image_generation/imggen_mmflash_with_txt.py
index 503adfcc9c4..cd6c458a757 100644
--- a/genai/image_generation/imggen_mmflash_with_txt.py
+++ b/genai/image_generation/imggen_mmflash_with_txt.py
@@ -15,31 +15,34 @@
def generate_content() -> str:
# [START googlegenaisdk_imggen_mmflash_with_txt]
+ import os
+ from io import BytesIO
+
from google import genai
from google.genai.types import GenerateContentConfig, Modality
from PIL import Image
- from io import BytesIO
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.0-flash-exp",
- contents=(
- "Generate an image of the Eiffel tower with fireworks in the background."
+ model="gemini-3-pro-image-preview",
+ contents=("Generate an image of the Eiffel tower with fireworks in the background."),
+ config=GenerateContentConfig(
+ response_modalities=[Modality.TEXT, Modality.IMAGE],
),
- config=GenerateContentConfig(response_modalities=[Modality.TEXT, Modality.IMAGE]),
)
for part in response.candidates[0].content.parts:
if part.text:
print(part.text)
elif part.inline_data:
image = Image.open(BytesIO((part.inline_data.data)))
- image.save("example-image.png")
- # Example response:
- # A beautiful photograph captures the iconic Eiffel Tower in Paris, France,
- # against a backdrop of a vibrant and dynamic fireworks display. The tower itself...
+ # Ensure the output directory exists
+ output_dir = "output_folder"
+ os.makedirs(output_dir, exist_ok=True)
+ image.save(os.path.join(output_dir, "example-image-eiffel-tower.png"))
+
# [END googlegenaisdk_imggen_mmflash_with_txt]
- return "example-image.png"
+ return True
if __name__ == "__main__":
diff --git a/genai/image_generation/imggen_outpainting_with_txt_img.py b/genai/image_generation/imggen_outpainting_with_txt_img.py
index 4994553d978..f213540169e 100644
--- a/genai/image_generation/imggen_outpainting_with_txt_img.py
+++ b/genai/image_generation/imggen_outpainting_with_txt_img.py
@@ -18,7 +18,12 @@
def edit_outpainting(output_file: str) -> Image:
# [START googlegenaisdk_imggen_outpainting_with_txt_img]
from google import genai
- from google.genai.types import RawReferenceImage, MaskReferenceImage, MaskReferenceConfig, EditImageConfig
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
client = genai.Client()
@@ -26,10 +31,12 @@ def edit_outpainting(output_file: str) -> Image:
# output_file = "output-image.png"
raw_ref = RawReferenceImage(
- reference_image=Image.from_file(location='test_resources/living_room.png'), reference_id=0)
+ reference_image=Image.from_file(location="test_resources/living_room.png"),
+ reference_id=0,
+ )
mask_ref = MaskReferenceImage(
reference_id=1,
- reference_image=Image.from_file(location='test_resources/living_room_mask.png'),
+ reference_image=Image.from_file(location="test_resources/living_room_mask.png"),
config=MaskReferenceConfig(
mask_mode="MASK_MODE_USER_PROVIDED",
mask_dilation=0.03,
@@ -56,4 +63,4 @@ def edit_outpainting(output_file: str) -> Image:
if __name__ == "__main__":
- edit_outpainting(output_file="test_resources/living_room_edit.png")
+ edit_outpainting(output_file="output_folder/living_room_edit.png")
diff --git a/genai/image_generation/imggen_product_background_mask_with_txt_img.py b/genai/image_generation/imggen_product_background_mask_with_txt_img.py
new file mode 100644
index 00000000000..239fd2c1ee9
--- /dev/null
+++ b/genai/image_generation/imggen_product_background_mask_with_txt_img.py
@@ -0,0 +1,66 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai.types import Image
+
+
+def edit_product_background_mask(output_file: str) -> Image:
+ # [START googlegenaisdk_imggen_product_background_mask_with_txt_img]
+ from google import genai
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_file = "output-image.png"
+
+ raw_ref = RawReferenceImage(
+ reference_image=Image.from_file(location="test_resources/suitcase.png"),
+ reference_id=0,
+ )
+ mask_ref = MaskReferenceImage(
+ reference_id=1,
+ reference_image=Image.from_file(location="test_resources/suitcase_mask.png"),
+ config=MaskReferenceConfig(
+ mask_mode="MASK_MODE_USER_PROVIDED",
+ mask_dilation=0.0,
+ ),
+ )
+
+ image = client.models.edit_image(
+ model="imagen-3.0-capability-001",
+ prompt="A light blue suitcase in an airport",
+ reference_images=[raw_ref, mask_ref],
+ config=EditImageConfig(
+ edit_mode="EDIT_MODE_BGSWAP",
+ ),
+ )
+
+ image.generated_images[0].image.save(output_file)
+
+ print(f"Created output image using {len(image.generated_images[0].image.image_bytes)} bytes")
+ # Example response:
+ # Created output image using 1234567 bytes
+
+ # [END googlegenaisdk_imggen_product_background_mask_with_txt_img]
+ return image.generated_images[0].image
+
+
+if __name__ == "__main__":
+ edit_product_background_mask(output_file="output_folder/suitcase_edit.png")
diff --git a/genai/image_generation/imggen_product_background_with_txt_img.py b/genai/image_generation/imggen_product_background_with_txt_img.py
index f09ef691775..6dcde90c8d3 100644
--- a/genai/image_generation/imggen_product_background_with_txt_img.py
+++ b/genai/image_generation/imggen_product_background_with_txt_img.py
@@ -18,7 +18,12 @@
def edit_product_background(output_file: str) -> Image:
# [START googlegenaisdk_imggen_product_background_with_txt_img]
from google import genai
- from google.genai.types import RawReferenceImage, MaskReferenceImage, MaskReferenceConfig, EditImageConfig
+ from google.genai.types import (
+ RawReferenceImage,
+ MaskReferenceImage,
+ MaskReferenceConfig,
+ EditImageConfig,
+ )
client = genai.Client()
@@ -26,7 +31,9 @@ def edit_product_background(output_file: str) -> Image:
# output_file = "output-image.png"
raw_ref = RawReferenceImage(
- reference_image=Image.from_file(location='test_resources/suitcase.png'), reference_id=0)
+ reference_image=Image.from_file(location="test_resources/suitcase.png"),
+ reference_id=0,
+ )
mask_ref = MaskReferenceImage(
reference_id=1,
reference_image=None,
@@ -55,4 +62,4 @@ def edit_product_background(output_file: str) -> Image:
if __name__ == "__main__":
- edit_product_background(output_file="test_resources/suitcase_edit.png")
+ edit_product_background(output_file="output_folder/suitcase_edit.png")
diff --git a/genai/image_generation/imggen_raw_reference_with_txt_img.py b/genai/image_generation/imggen_raw_reference_with_txt_img.py
index b1c04268c28..c60830bc6f5 100644
--- a/genai/image_generation/imggen_raw_reference_with_txt_img.py
+++ b/genai/image_generation/imggen_raw_reference_with_txt_img.py
@@ -27,7 +27,7 @@ def style_transfer_customization(output_gcs_uri: str) -> str:
# using https://storage.googleapis.com/cloud-samples-data/generative-ai/image/teacup-1.png
raw_ref_image = RawReferenceImage(
reference_image=Image(gcs_uri="gs://cloud-samples-data/generative-ai/image/teacup-1.png"),
- reference_id=1
+ reference_id=1,
)
image = client.models.edit_image(
@@ -37,7 +37,6 @@ def style_transfer_customization(output_gcs_uri: str) -> str:
config=EditImageConfig(
edit_mode="EDIT_MODE_DEFAULT",
number_of_images=1,
- seed=1,
safety_filter_level="BLOCK_MEDIUM_AND_ABOVE",
person_generation="ALLOW_ADULT",
output_gcs_uri=output_gcs_uri,
diff --git a/genai/image_generation/imggen_scribble_ctrl_type_with_txt_img.py b/genai/image_generation/imggen_scribble_ctrl_type_with_txt_img.py
index 9e86531f9f0..64e9a95a477 100644
--- a/genai/image_generation/imggen_scribble_ctrl_type_with_txt_img.py
+++ b/genai/image_generation/imggen_scribble_ctrl_type_with_txt_img.py
@@ -16,7 +16,12 @@
def scribble_customization(output_gcs_uri: str) -> str:
# [START googlegenaisdk_imggen_scribble_ctrl_type_with_txt_img]
from google import genai
- from google.genai.types import ControlReferenceConfig, ControlReferenceImage, EditImageConfig, Image
+ from google.genai.types import (
+ ControlReferenceConfig,
+ ControlReferenceImage,
+ EditImageConfig,
+ Image,
+ )
client = genai.Client()
@@ -38,7 +43,6 @@ def scribble_customization(output_gcs_uri: str) -> str:
config=EditImageConfig(
edit_mode="EDIT_MODE_CONTROLLED_EDITING",
number_of_images=1,
- seed=1,
safety_filter_level="BLOCK_MEDIUM_AND_ABOVE",
person_generation="ALLOW_ADULT",
output_gcs_uri=output_gcs_uri,
diff --git a/genai/image_generation/imggen_style_reference_with_txt_img.py b/genai/image_generation/imggen_style_reference_with_txt_img.py
index a41be2019fb..124c9db8fbe 100644
--- a/genai/image_generation/imggen_style_reference_with_txt_img.py
+++ b/genai/image_generation/imggen_style_reference_with_txt_img.py
@@ -16,7 +16,12 @@
def style_customization(output_gcs_uri: str) -> str:
# [START googlegenaisdk_imggen_style_reference_with_txt_img]
from google import genai
- from google.genai.types import EditImageConfig, Image, StyleReferenceConfig, StyleReferenceImage
+ from google.genai.types import (
+ EditImageConfig,
+ Image,
+ StyleReferenceConfig,
+ StyleReferenceImage,
+ )
client = genai.Client()
@@ -38,7 +43,6 @@ def style_customization(output_gcs_uri: str) -> str:
config=EditImageConfig(
edit_mode="EDIT_MODE_DEFAULT",
number_of_images=1,
- seed=1,
safety_filter_level="BLOCK_MEDIUM_AND_ABOVE",
person_generation="ALLOW_ADULT",
output_gcs_uri=output_gcs_uri,
diff --git a/genai/image_generation/imggen_subj_refer_ctrl_refer_with_txt_imgs.py b/genai/image_generation/imggen_subj_refer_ctrl_refer_with_txt_imgs.py
index 554e1273c4d..50f733e61c3 100644
--- a/genai/image_generation/imggen_subj_refer_ctrl_refer_with_txt_imgs.py
+++ b/genai/image_generation/imggen_subj_refer_ctrl_refer_with_txt_imgs.py
@@ -22,7 +22,7 @@ def subject_customization(output_gcs_uri: str) -> str:
EditImageConfig,
Image,
SubjectReferenceConfig,
- SubjectReferenceImage
+ SubjectReferenceImage,
)
client = genai.Client()
@@ -36,7 +36,8 @@ def subject_customization(output_gcs_uri: str) -> str:
reference_id=1,
reference_image=Image(gcs_uri="gs://cloud-samples-data/generative-ai/image/person.png"),
config=SubjectReferenceConfig(
- subject_description="a headshot of a woman", subject_type="SUBJECT_TYPE_PERSON"
+ subject_description="a headshot of a woman",
+ subject_type="SUBJECT_TYPE_PERSON",
),
)
control_reference_image = ControlReferenceImage(
@@ -56,7 +57,6 @@ def subject_customization(output_gcs_uri: str) -> str:
config=EditImageConfig(
edit_mode="EDIT_MODE_DEFAULT",
number_of_images=1,
- seed=1,
safety_filter_level="BLOCK_MEDIUM_AND_ABOVE",
person_generation="ALLOW_ADULT",
output_gcs_uri=output_gcs_uri,
diff --git a/genai/image_generation/imggen_upscale_with_img.py b/genai/image_generation/imggen_upscale_with_img.py
new file mode 100644
index 00000000000..c3ea9ffa640
--- /dev/null
+++ b/genai/image_generation/imggen_upscale_with_img.py
@@ -0,0 +1,45 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai.types import Image
+
+
+def upscale_images(output_file: str) -> Image:
+ # [START googlegenaisdk_imggen_upscale_with_img]
+ from google import genai
+ from google.genai.types import Image
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_file = "output-image.png"
+
+ image = client.models.upscale_image(
+ model="imagen-4.0-upscale-preview",
+ image=Image.from_file(location="test_resources/dog_newspaper.png"),
+ upscale_factor="x2",
+ )
+
+ image.generated_images[0].image.save(output_file)
+
+ print(f"Created output image using {len(image.generated_images[0].image.image_bytes)} bytes")
+ # Example response:
+ # Created output image using 1234567 bytes
+
+ # [END googlegenaisdk_imggen_upscale_with_img]
+ return image.generated_images[0].image
+
+
+if __name__ == "__main__":
+ upscale_images(output_file="output_folder/dog_newspaper.png")
diff --git a/genai/image_generation/imggen_virtual_try_on_with_txt_img.py b/genai/image_generation/imggen_virtual_try_on_with_txt_img.py
new file mode 100644
index 00000000000..f1e6b6cc5cd
--- /dev/null
+++ b/genai/image_generation/imggen_virtual_try_on_with_txt_img.py
@@ -0,0 +1,49 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.genai.types import Image
+
+
+def virtual_try_on(output_file: str) -> Image:
+ # [START googlegenaisdk_imggen_virtual_try_on_with_txt_img]
+ from google import genai
+ from google.genai.types import RecontextImageSource, ProductImage
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_file = "output-image.png"
+
+ image = client.models.recontext_image(
+ model="virtual-try-on-001",
+ source=RecontextImageSource(
+ person_image=Image.from_file(location="test_resources/man.png"),
+ product_images=[
+ ProductImage(product_image=Image.from_file(location="test_resources/sweater.jpg"))
+ ],
+ ),
+ )
+
+ image.generated_images[0].image.save(output_file)
+
+ print(f"Created output image using {len(image.generated_images[0].image.image_bytes)} bytes")
+ # Example response:
+ # Created output image using 1234567 bytes
+
+ # [END googlegenaisdk_imggen_virtual_try_on_with_txt_img]
+ return image.generated_images[0].image
+
+
+if __name__ == "__main__":
+ virtual_try_on(output_file="output_folder/man_in_sweater.png")
diff --git a/genai/image_generation/imggen_with_txt.py b/genai/image_generation/imggen_with_txt.py
index 6a40baa7f1b..cfd673042c2 100644
--- a/genai/image_generation/imggen_with_txt.py
+++ b/genai/image_generation/imggen_with_txt.py
@@ -18,6 +18,7 @@
def generate_images(output_file: str) -> Image:
# [START googlegenaisdk_imggen_with_txt]
from google import genai
+ from google.genai.types import GenerateImagesConfig
client = genai.Client()
@@ -25,8 +26,11 @@ def generate_images(output_file: str) -> Image:
# output_file = "output-image.png"
image = client.models.generate_images(
- model="imagen-4.0-generate-preview-06-06",
+ model="imagen-4.0-generate-001",
prompt="A dog reading a newspaper",
+ config=GenerateImagesConfig(
+ image_size="2K",
+ ),
)
image.generated_images[0].image.save(output_file)
@@ -40,4 +44,4 @@ def generate_images(output_file: str) -> Image:
if __name__ == "__main__":
- generate_images(output_file="test_resources/dog_newspaper.png")
+ generate_images(output_file="output_folder/dog_newspaper.png")
diff --git a/genai/image_generation/output_folder/bw-example-image.png b/genai/image_generation/output_folder/bw-example-image.png
new file mode 100644
index 00000000000..5c2289f477c
Binary files /dev/null and b/genai/image_generation/output_folder/bw-example-image.png differ
diff --git a/genai/image_generation/output_folder/example-cats-01.png b/genai/image_generation/output_folder/example-cats-01.png
new file mode 100644
index 00000000000..6ec55171571
Binary files /dev/null and b/genai/image_generation/output_folder/example-cats-01.png differ
diff --git a/genai/image_generation/output_folder/example-cats-02.png b/genai/image_generation/output_folder/example-cats-02.png
new file mode 100644
index 00000000000..4dbdfd7ba1c
Binary files /dev/null and b/genai/image_generation/output_folder/example-cats-02.png differ
diff --git a/genai/image_generation/output_folder/example-cats-03.png b/genai/image_generation/output_folder/example-cats-03.png
new file mode 100644
index 00000000000..cbf61c27dc2
Binary files /dev/null and b/genai/image_generation/output_folder/example-cats-03.png differ
diff --git a/genai/image_generation/output_folder/example-cats-04.png b/genai/image_generation/output_folder/example-cats-04.png
new file mode 100644
index 00000000000..01f3bc44a64
Binary files /dev/null and b/genai/image_generation/output_folder/example-cats-04.png differ
diff --git a/genai/image_generation/output_folder/example-cats-06.png b/genai/image_generation/output_folder/example-cats-06.png
new file mode 100644
index 00000000000..459968ebb18
Binary files /dev/null and b/genai/image_generation/output_folder/example-cats-06.png differ
diff --git a/genai/image_generation/output_folder/example-image-10.png b/genai/image_generation/output_folder/example-image-10.png
new file mode 100644
index 00000000000..36aeb3bd7c7
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-10.png differ
diff --git a/genai/image_generation/output_folder/example-image-12.png b/genai/image_generation/output_folder/example-image-12.png
new file mode 100644
index 00000000000..02f1dfc1682
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-12.png differ
diff --git a/genai/image_generation/output_folder/example-image-14.png b/genai/image_generation/output_folder/example-image-14.png
new file mode 100644
index 00000000000..c0bfae5496e
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-14.png differ
diff --git a/genai/image_generation/output_folder/example-image-16.png b/genai/image_generation/output_folder/example-image-16.png
new file mode 100644
index 00000000000..b264d152e1f
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-16.png differ
diff --git a/genai/image_generation/output_folder/example-image-18.png b/genai/image_generation/output_folder/example-image-18.png
new file mode 100644
index 00000000000..0fcd0826de6
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-18.png differ
diff --git a/genai/image_generation/output_folder/example-image-2.png b/genai/image_generation/output_folder/example-image-2.png
new file mode 100644
index 00000000000..2c0593ab004
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-2.png differ
diff --git a/genai/image_generation/output_folder/example-image-4.png b/genai/image_generation/output_folder/example-image-4.png
new file mode 100644
index 00000000000..3b567a5ce1e
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-4.png differ
diff --git a/genai/image_generation/output_folder/example-image-6.png b/genai/image_generation/output_folder/example-image-6.png
new file mode 100644
index 00000000000..837519dd752
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-6.png differ
diff --git a/genai/image_generation/output_folder/example-image-8.png b/genai/image_generation/output_folder/example-image-8.png
new file mode 100644
index 00000000000..6341d5f1772
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-8.png differ
diff --git a/genai/image_generation/output_folder/example-image-eiffel-tower.png b/genai/image_generation/output_folder/example-image-eiffel-tower.png
new file mode 100644
index 00000000000..0cf9b0e50de
Binary files /dev/null and b/genai/image_generation/output_folder/example-image-eiffel-tower.png differ
diff --git a/genai/image_generation/output_folder/example-image.png b/genai/image_generation/output_folder/example-image.png
new file mode 100644
index 00000000000..2a602e62698
Binary files /dev/null and b/genai/image_generation/output_folder/example-image.png differ
diff --git a/genai/image_generation/output_folder/example-meal.png b/genai/image_generation/output_folder/example-meal.png
new file mode 100644
index 00000000000..be1cc9ffe92
Binary files /dev/null and b/genai/image_generation/output_folder/example-meal.png differ
diff --git a/genai/image_generation/output_folder/paella-recipe.md b/genai/image_generation/output_folder/paella-recipe.md
new file mode 100644
index 00000000000..0191dc3bc03
--- /dev/null
+++ b/genai/image_generation/output_folder/paella-recipe.md
@@ -0,0 +1,55 @@
+Okay, I will generate an illustrated recipe for paella, creating an image for each step.
+
+**Step 1: Gather Your Ingredients**
+
+An overhead shot of a rustic wooden table displaying all the necessary ingredients for paella. This includes short-grain rice, chicken thighs and drumsticks, chorizo sausage, shrimp, mussels, clams, a red bell pepper, a yellow onion, garlic cloves, peas (fresh or frozen), saffron threads, paprika, olive oil, chicken broth, a lemon, fresh parsley, salt, and pepper. Each ingredient should be clearly visible and arranged artfully.
+
+
+
+**Step 2: Prepare the Vegetables and Meat**
+
+An image showing hands chopping a yellow onion on a wooden cutting board, with a diced red bell pepper and minced garlic in separate small bowls nearby. In the background, seasoned chicken pieces and sliced chorizo are ready in other bowls.
+
+
+
+**Step 3: Sauté the Chicken and Chorizo**
+
+A close-up shot of a wide, shallow paella pan over a stove burner. Chicken pieces are browning in olive oil, and slices of chorizo are nestled amongst them, releasing their vibrant red color and oils.
+
+
+
+**Step 4: Add Vegetables and Aromatics**
+
+The paella pan now contains sautéed onions and bell peppers, softened and slightly translucent, mixed with the browned chicken and chorizo. Minced garlic and a pinch of paprika are being stirred into the mixture.
+
+
+
+**Step 5: Introduce the Rice and Saffron**
+
+Short-grain rice is being poured into the paella pan, distributed evenly among the other ingredients. A few strands of saffron are being sprinkled over the rice, adding a golden hue.
+
+
+
+**Step 6: Add the Broth and Simmer**
+
+Chicken broth is being poured into the paella pan, completely covering the rice and other ingredients. The mixture is starting to simmer gently, with small bubbles forming on the surface.
+
+
+
+**Step 7: Add Seafood and Peas**
+
+Shrimp, mussels, and clams are being carefully arranged on top of the rice in the paella pan. Frozen peas are being scattered over the surface. The broth has reduced slightly.
+
+
+
+**Step 8: Let it Rest**
+
+A finished paella in the pan, off the heat and resting. The rice looks fluffy, the seafood is cooked, and the mussels and clams have opened. Steam is gently rising from the dish. A lemon wedge and some fresh parsley sprigs are placed on top as a garnish.
+
+
+
+**Step 9: Serve and Enjoy!**
+
+A portion of the vibrant paella is being served onto a plate, showcasing the different textures and colors of the rice, seafood, meat, and vegetables. A lemon wedge and a sprinkle of fresh parsley complete the serving.
+
+
\ No newline at end of file
diff --git a/genai/image_generation/requirements.txt b/genai/image_generation/requirements.txt
index 9653154bf93..86da356810f 100644
--- a/genai/image_generation/requirements.txt
+++ b/genai/image_generation/requirements.txt
@@ -1,2 +1,2 @@
-google-genai==1.16.1
+google-genai==1.42.0
pillow==11.1.0
diff --git a/genai/image_generation/test_image_generation.py b/genai/image_generation/test_image_generation.py
index 3e2d89d58fb..f30b295f85e 100644
--- a/genai/image_generation/test_image_generation.py
+++ b/genai/image_generation/test_image_generation.py
@@ -25,21 +25,22 @@
import pytest
import imggen_canny_ctrl_type_with_txt_img
+import imggen_inpainting_insert_mask_with_txt_img
import imggen_inpainting_insert_with_txt_img
+import imggen_inpainting_removal_mask_with_txt_img
import imggen_inpainting_removal_with_txt_img
import imggen_mask_free_edit_with_txt_img
-import imggen_mmflash_edit_img_with_txt_img
-import imggen_mmflash_txt_and_img_with_txt
-import imggen_mmflash_with_txt
import imggen_outpainting_with_txt_img
+import imggen_product_background_mask_with_txt_img
import imggen_product_background_with_txt_img
import imggen_raw_reference_with_txt_img
import imggen_scribble_ctrl_type_with_txt_img
import imggen_style_reference_with_txt_img
import imggen_subj_refer_ctrl_refer_with_txt_imgs
+import imggen_upscale_with_img
+import imggen_virtual_try_on_with_txt_img
import imggen_with_txt
-
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
# The project name is included in the CICD pipeline
@@ -64,49 +65,55 @@ def output_gcs_uri() -> str:
def test_img_generation() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "dog_newspaper.png")
- response = imggen_with_txt.generate_images(
- OUTPUT_FILE
- )
+ response = imggen_with_txt.generate_images(OUTPUT_FILE)
+ assert response
+
+
+def test_img_edit_inpainting_insert_with_mask() -> None:
+ OUTPUT_FILE = os.path.join(RESOURCES, "fruit_edit.png")
+ response = imggen_inpainting_insert_mask_with_txt_img.edit_inpainting_insert_mask(OUTPUT_FILE)
assert response
def test_img_edit_inpainting_insert() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "fruit_edit.png")
- response = imggen_inpainting_insert_with_txt_img.edit_inpainting_insert(
- OUTPUT_FILE
- )
+ response = imggen_inpainting_insert_with_txt_img.edit_inpainting_insert(OUTPUT_FILE)
+ assert response
+
+
+def test_img_edit_inpainting_removal_mask() -> None:
+ OUTPUT_FILE = os.path.join(RESOURCES, "fruit_edit.png")
+ response = imggen_inpainting_removal_mask_with_txt_img.edit_inpainting_removal_mask(OUTPUT_FILE)
assert response
def test_img_edit_inpainting_removal() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "fruit_edit.png")
- response = imggen_inpainting_removal_with_txt_img.edit_inpainting_removal(
- OUTPUT_FILE
- )
+ response = imggen_inpainting_removal_with_txt_img.edit_inpainting_removal(OUTPUT_FILE)
+ assert response
+
+
+def test_img_edit_product_background_mask() -> None:
+ OUTPUT_FILE = os.path.join(RESOURCES, "suitcase_edit.png")
+ response = imggen_product_background_mask_with_txt_img.edit_product_background_mask(OUTPUT_FILE)
assert response
def test_img_edit_product_background() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "suitcase_edit.png")
- response = imggen_product_background_with_txt_img.edit_product_background(
- OUTPUT_FILE
- )
+ response = imggen_product_background_with_txt_img.edit_product_background(OUTPUT_FILE)
assert response
def test_img_edit_outpainting() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "living_room_edit.png")
- response = imggen_outpainting_with_txt_img.edit_outpainting(
- OUTPUT_FILE
- )
+ response = imggen_outpainting_with_txt_img.edit_outpainting(OUTPUT_FILE)
assert response
def test_img_edit_mask_free() -> None:
OUTPUT_FILE = os.path.join(RESOURCES, "latte_edit.png")
- response = imggen_mask_free_edit_with_txt_img.edit_mask_free(
- OUTPUT_FILE
- )
+ response = imggen_mask_free_edit_with_txt_img.edit_mask_free(OUTPUT_FILE)
assert response
@@ -118,53 +125,32 @@ def test_img_customization_subject(output_gcs_uri: str) -> None:
def test_img_customization_style(output_gcs_uri: str) -> None:
- response = imggen_style_reference_with_txt_img.style_customization(
- output_gcs_uri=output_gcs_uri
- )
+ response = imggen_style_reference_with_txt_img.style_customization(output_gcs_uri=output_gcs_uri)
assert response
def test_img_customization_style_transfer(output_gcs_uri: str) -> None:
- response = imggen_raw_reference_with_txt_img.style_transfer_customization(
- output_gcs_uri=output_gcs_uri
- )
+ response = imggen_raw_reference_with_txt_img.style_transfer_customization(output_gcs_uri=output_gcs_uri)
assert response
def test_img_customization_scribble(output_gcs_uri: str) -> None:
- response = imggen_scribble_ctrl_type_with_txt_img.scribble_customization(
- output_gcs_uri=output_gcs_uri
- )
+ response = imggen_scribble_ctrl_type_with_txt_img.scribble_customization(output_gcs_uri=output_gcs_uri)
assert response
def test_img_customization_canny_edge(output_gcs_uri: str) -> None:
- response = imggen_canny_ctrl_type_with_txt_img.canny_edge_customization(
- output_gcs_uri=output_gcs_uri
- )
+ response = imggen_canny_ctrl_type_with_txt_img.canny_edge_customization(output_gcs_uri=output_gcs_uri)
assert response
-def test_imggen_mmflash_examples() -> None:
- # generate image
- fname = imggen_mmflash_with_txt.generate_content()
- assert os.path.isfile(fname)
- # edit generate image
- new_fname = imggen_mmflash_edit_img_with_txt_img.generate_content()
- assert os.path.isfile(new_fname)
-
- # clean-up
- os.remove(fname)
- os.remove(new_fname)
+def test_img_virtual_try_on() -> None:
+ OUTPUT_FILE = os.path.join(RESOURCES, "man_in_sweater.png")
+ response = imggen_virtual_try_on_with_txt_img.virtual_try_on(OUTPUT_FILE)
+ assert response
-def test_imggen_mmflash_txt_and_img_with_txt() -> None:
- last_image_id = imggen_mmflash_txt_and_img_with_txt.generate_content()
- # clean-up
- for i in range(last_image_id + 1):
- img_name = f"example-image-{i+1}.png"
- if os.path.isfile(img_name):
- os.remove(img_name)
- fname = "paella-recipe.md"
- if os.path.isfile(fname):
- os.remove(fname)
+def test_img_upscale() -> None:
+ OUTPUT_FILE = os.path.join(RESOURCES, "dog_newspaper.png")
+ response = imggen_upscale_with_img.upscale_images(OUTPUT_FILE)
+ assert response
diff --git a/genai/image_generation/test_image_generation_mmflash.py b/genai/image_generation/test_image_generation_mmflash.py
new file mode 100644
index 00000000000..3ae60ec66ba
--- /dev/null
+++ b/genai/image_generation/test_image_generation_mmflash.py
@@ -0,0 +1,51 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Using Google Cloud Vertex AI to test the code samples.
+#
+
+import os
+
+import imggen_mmflash_edit_img_with_txt_img
+import imggen_mmflash_locale_aware_with_txt
+import imggen_mmflash_multiple_imgs_with_txt
+import imggen_mmflash_txt_and_img_with_txt
+import imggen_mmflash_with_txt
+
+
+os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"
+# The project name is included in the CICD pipeline
+# os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"
+
+
+def test_imggen_mmflash_with_txt() -> None:
+ assert imggen_mmflash_with_txt.generate_content()
+
+
+def test_imggen_mmflash_edit_img_with_txt_img() -> None:
+ assert imggen_mmflash_edit_img_with_txt_img.generate_content()
+
+
+def test_imggen_mmflash_txt_and_img_with_txt() -> None:
+ assert imggen_mmflash_txt_and_img_with_txt.generate_content()
+
+
+def test_imggen_mmflash_locale_aware_with_txt() -> None:
+ assert imggen_mmflash_locale_aware_with_txt.generate_content()
+
+
+def test_imggen_mmflash_multiple_imgs_with_txt() -> None:
+ assert imggen_mmflash_multiple_imgs_with_txt.generate_content()
diff --git a/genai/image_generation/test_resources/dog_newspaper.png b/genai/image_generation/test_resources/dog_newspaper.png
index 0e502cdbb61..5f8961e6c10 100644
Binary files a/genai/image_generation/test_resources/dog_newspaper.png and b/genai/image_generation/test_resources/dog_newspaper.png differ
diff --git a/genai/image_generation/test_resources/example-image-eiffel-tower.png b/genai/image_generation/test_resources/example-image-eiffel-tower.png
new file mode 100644
index 00000000000..2a602e62698
Binary files /dev/null and b/genai/image_generation/test_resources/example-image-eiffel-tower.png differ
diff --git a/genai/image_generation/test_resources/fruit_mask.png b/genai/image_generation/test_resources/fruit_mask.png
new file mode 100644
index 00000000000..fd4e8dbf4f0
Binary files /dev/null and b/genai/image_generation/test_resources/fruit_mask.png differ
diff --git a/genai/image_generation/test_resources/latte_edit.png b/genai/image_generation/test_resources/latte_edit.png
index ec8f61ef661..f5f7465c36f 100644
Binary files a/genai/image_generation/test_resources/latte_edit.png and b/genai/image_generation/test_resources/latte_edit.png differ
diff --git a/genai/image_generation/test_resources/living_room_edit.png b/genai/image_generation/test_resources/living_room_edit.png
index 635498dc1e2..c949440e101 100644
Binary files a/genai/image_generation/test_resources/living_room_edit.png and b/genai/image_generation/test_resources/living_room_edit.png differ
diff --git a/genai/image_generation/test_resources/man.png b/genai/image_generation/test_resources/man.png
new file mode 100644
index 00000000000..7cf652e8e6e
Binary files /dev/null and b/genai/image_generation/test_resources/man.png differ
diff --git a/genai/image_generation/test_resources/man_in_sweater.png b/genai/image_generation/test_resources/man_in_sweater.png
new file mode 100644
index 00000000000..81bad264117
Binary files /dev/null and b/genai/image_generation/test_resources/man_in_sweater.png differ
diff --git a/genai/image_generation/test_resources/suitcase_mask.png b/genai/image_generation/test_resources/suitcase_mask.png
new file mode 100644
index 00000000000..45cc99b7a3e
Binary files /dev/null and b/genai/image_generation/test_resources/suitcase_mask.png differ
diff --git a/genai/image_generation/test_resources/sweater.jpg b/genai/image_generation/test_resources/sweater.jpg
new file mode 100644
index 00000000000..69cc18f921f
Binary files /dev/null and b/genai/image_generation/test_resources/sweater.jpg differ
diff --git a/genai/live/live_audio_with_txt.py b/genai/live/live_audio_with_txt.py
new file mode 100644
index 00000000000..3860b9f0128
--- /dev/null
+++ b/genai/live/live_audio_with_txt.py
@@ -0,0 +1,85 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile simpleaudio
+
+import asyncio
+
+
+async def generate_content() -> list:
+ # [START googlegenaisdk_live_audio_with_txt]
+ from google import genai
+ from google.genai.types import (
+ Content, LiveConnectConfig, Modality, Part,
+ PrebuiltVoiceConfig, SpeechConfig, VoiceConfig
+ )
+ import numpy as np
+ import soundfile as sf
+ import simpleaudio as sa
+
+ def play_audio(audio_array: np.ndarray, sample_rate: int = 24000) -> None:
+ sf.write("output.wav", audio_array, sample_rate)
+ wave_obj = sa.WaveObject.from_wave_file("output.wav")
+ play_obj = wave_obj.play()
+ play_obj.wait_done()
+
+ client = genai.Client()
+ voice_name = "Aoede"
+ model = "gemini-live-2.5-flash-native-audio"
+
+ config = LiveConnectConfig(
+ response_modalities=[Modality.AUDIO],
+ speech_config=SpeechConfig(
+ voice_config=VoiceConfig(
+ prebuilt_voice_config=PrebuiltVoiceConfig(
+ voice_name=voice_name,
+ )
+ ),
+ ),
+ )
+
+ async with client.aio.live.connect(
+ model=model,
+ config=config,
+ ) as session:
+ text_input = "Hello? Gemini are you there?"
+ print("> ", text_input, "\n")
+
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ audio_data = []
+ async for message in session.receive():
+ if (
+ message.server_content.model_turn
+ and message.server_content.model_turn.parts
+ ):
+ for part in message.server_content.model_turn.parts:
+ if part.inline_data:
+ audio_data.append(
+ np.frombuffer(part.inline_data.data, dtype=np.int16)
+ )
+
+ if audio_data:
+ print("Received audio answer: ")
+ play_audio(np.concatenate(audio_data), sample_rate=24000)
+
+ # [END googlegenaisdk_live_audio_with_txt]
+ return []
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_audiogen_with_txt.py b/genai/live/live_audiogen_with_txt.py
new file mode 100644
index 00000000000..29e20e8d661
--- /dev/null
+++ b/genai/live/live_audiogen_with_txt.py
@@ -0,0 +1,89 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile
+
+import asyncio
+
+
+async def generate_content() -> None:
+ # [START googlegenaisdk_live_audiogen_with_txt]
+ import numpy as np
+ import scipy.io.wavfile as wavfile
+ from google import genai
+ from google.genai.types import (Content, LiveConnectConfig, Modality, Part,
+ PrebuiltVoiceConfig, SpeechConfig,
+ VoiceConfig)
+
+ client = genai.Client()
+ model = "gemini-live-2.5-flash-native-audio"
+ # For more Voice options, check https://cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/2-5-flash#live-api-native-audio
+ voice_name = "Aoede"
+
+ config = LiveConnectConfig(
+ response_modalities=[Modality.AUDIO],
+ speech_config=SpeechConfig(
+ voice_config=VoiceConfig(
+ prebuilt_voice_config=PrebuiltVoiceConfig(
+ voice_name=voice_name,
+ )
+ ),
+ ),
+ )
+
+ async with client.aio.live.connect(
+ model=model,
+ config=config,
+ ) as session:
+ text_input = "Hello? Gemini are you there?"
+ print("> ", text_input, "\n")
+
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ audio_data_chunks = []
+ async for message in session.receive():
+ if (
+ message.server_content.model_turn
+ and message.server_content.model_turn.parts
+ ):
+ for part in message.server_content.model_turn.parts:
+ if part.inline_data:
+ audio_data_chunks.append(
+ np.frombuffer(part.inline_data.data, dtype=np.int16)
+ )
+
+ if audio_data_chunks:
+ print("Received audio answer. Saving to local file...")
+ full_audio_array = np.concatenate(audio_data_chunks)
+
+ output_filename = "gemini_response.wav"
+ sample_rate = 24000
+
+ wavfile.write(output_filename, sample_rate, full_audio_array)
+ print(f"Audio saved to {output_filename}")
+
+ # Example output:
+ # > Hello? Gemini are you there?
+ # Received audio answer. Saving to local file...
+ # Audio saved to gemini_response.wav
+ # [END googlegenaisdk_live_audiogen_with_txt]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_code_exec_with_txt.py b/genai/live/live_code_exec_with_txt.py
new file mode 100644
index 00000000000..ce36fc9f7b1
--- /dev/null
+++ b/genai/live/live_code_exec_with_txt.py
@@ -0,0 +1,62 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+
+
+async def generate_content() -> list[str]:
+ # [START googlegenaisdk_live_code_exec_with_txt]
+ from google import genai
+ from google.genai.types import (Content, LiveConnectConfig, Modality, Part,
+ Tool, ToolCodeExecution)
+
+ client = genai.Client()
+ model_id = "gemini-2.0-flash-live-preview-04-09"
+ config = LiveConnectConfig(
+ response_modalities=[Modality.TEXT],
+ tools=[Tool(code_execution=ToolCodeExecution())],
+ )
+ async with client.aio.live.connect(model=model_id, config=config) as session:
+ text_input = "Compute the largest prime palindrome under 10"
+ print("> ", text_input, "\n")
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ response = []
+
+ async for chunk in session.receive():
+ if chunk.server_content:
+ if chunk.text is not None:
+ response.append(chunk.text)
+
+ model_turn = chunk.server_content.model_turn
+ if model_turn:
+ for part in model_turn.parts:
+ if part.executable_code is not None:
+ print(part.executable_code.code)
+
+ if part.code_execution_result is not None:
+ print(part.code_execution_result.output)
+
+ print("".join(response))
+ # Example output:
+ # > Compute the largest prime palindrome under 10
+ # Final Answer: The final answer is $\boxed{7}$
+ # [END googlegenaisdk_live_code_exec_with_txt]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_conversation_audio_with_audio.py b/genai/live/live_conversation_audio_with_audio.py
new file mode 100644
index 00000000000..5d5b5a05445
--- /dev/null
+++ b/genai/live/live_conversation_audio_with_audio.py
@@ -0,0 +1,133 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START googlegenaisdk_live_conversation_audio_with_audio]
+
+import asyncio
+import base64
+
+from google import genai
+from google.genai.types import (
+ AudioTranscriptionConfig,
+ Blob,
+ HttpOptions,
+ LiveConnectConfig,
+ Modality,
+)
+import numpy as np
+
+from scipy.io import wavfile
+
+# The number of audio frames to send in each chunk.
+CHUNK = 4200
+CHANNELS = 1
+MODEL = "gemini-live-2.5-flash-native-audio"
+
+# The audio sample rate expected by the model.
+INPUT_RATE = 16000
+# The audio sample rate of the audio generated by the model.
+OUTPUT_RATE = 24000
+
+# The sample width for 16-bit audio, which is standard for this type of audio data.
+SAMPLE_WIDTH = 2
+
+client = genai.Client(http_options=HttpOptions(api_version="v1beta1"), location="us-central1")
+
+
+def read_wavefile(filepath: str) -> tuple[str, str]:
+ # Read the .wav file using scipy.io.wavfile.read
+ rate, data = wavfile.read(filepath)
+ # Convert the NumPy array of audio samples back to raw bytes
+ raw_audio_bytes = data.tobytes()
+ # Encode the raw bytes to a base64 string.
+ # The result needs to be decoded from bytes to a UTF-8 string
+ base64_encoded_data = base64.b64encode(raw_audio_bytes).decode("ascii")
+ mime_type = f"audio/pcm;rate={rate}"
+ return base64_encoded_data, mime_type
+
+
+def write_wavefile(filepath: str, audio_frames: list[bytes], rate: int) -> None:
+ """Writes a list of audio byte frames to a WAV file using scipy."""
+ # Combine the list of byte frames into a single byte string
+ raw_audio_bytes = b"".join(audio_frames)
+
+ # Convert the raw bytes to a NumPy array.
+ # The sample width is 2 bytes (16-bit), so we use np.int16
+ audio_data = np.frombuffer(raw_audio_bytes, dtype=np.int16)
+
+ # Write the NumPy array to a .wav file
+ wavfile.write(filepath, rate, audio_data)
+ print(f"Model response saved to {filepath}")
+
+
+async def main() -> bool:
+ print("Starting the code")
+
+ async with client.aio.live.connect(
+ model=MODEL,
+ config=LiveConnectConfig(
+ # Set Model responses to be in Audio
+ response_modalities=[Modality.AUDIO],
+ # To generate transcript for input audio
+ input_audio_transcription=AudioTranscriptionConfig(),
+ # To generate transcript for output audio
+ output_audio_transcription=AudioTranscriptionConfig(),
+ ),
+ ) as session:
+
+ async def send() -> None:
+ # using local file as an example for live audio input
+ wav_file_path = "hello_gemini_are_you_there.wav"
+ base64_data, mime_type = read_wavefile(wav_file_path)
+ audio_bytes = base64.b64decode(base64_data)
+ await session.send_realtime_input(media=Blob(data=audio_bytes, mime_type=mime_type))
+
+ async def receive() -> None:
+ audio_frames = []
+
+ async for message in session.receive():
+ if message.server_content.input_transcription:
+ print(message.server_content.model_dump(mode="json", exclude_none=True))
+ if message.server_content.output_transcription:
+ print(message.server_content.model_dump(mode="json", exclude_none=True))
+ if message.server_content.model_turn:
+ for part in message.server_content.model_turn.parts:
+ if part.inline_data.data:
+ audio_data = part.inline_data.data
+ audio_frames.append(audio_data)
+
+ if audio_frames:
+ write_wavefile(
+ "example_model_response.wav",
+ audio_frames,
+ OUTPUT_RATE,
+ )
+
+ send_task = asyncio.create_task(send())
+ receive_task = asyncio.create_task(receive())
+ await asyncio.gather(send_task, receive_task)
+ # Example response:
+ # gemini-live-2.5-flash-native-audio
+ # {'input_transcription': {'text': 'Hello.'}}
+ # {'output_transcription': {}}
+ # {'output_transcription': {'text': 'Hi'}}
+ # {'output_transcription': {'text': ' there. What can I do for you today?'}}
+ # {'output_transcription': {'finished': True}}
+ # Model response saved to example_model_response.wav
+
+# [END googlegenaisdk_live_conversation_audio_with_audio]
+ return True
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/genai/live/live_func_call_with_txt.py b/genai/live/live_func_call_with_txt.py
new file mode 100644
index 00000000000..615ad1a8c9a
--- /dev/null
+++ b/genai/live/live_func_call_with_txt.py
@@ -0,0 +1,74 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+
+from google.genai.types import FunctionResponse
+
+
+async def generate_content() -> list[FunctionResponse]:
+ # [START googlegenaisdk_live_func_call_with_txt]
+ from google import genai
+ from google.genai.types import (Content, FunctionDeclaration,
+ FunctionResponse, LiveConnectConfig,
+ Modality, Part, Tool)
+
+ client = genai.Client()
+ model_id = "gemini-2.0-flash-live-preview-04-09"
+
+ # Simple function definitions
+ turn_on_the_lights = FunctionDeclaration(name="turn_on_the_lights")
+ turn_off_the_lights = FunctionDeclaration(name="turn_off_the_lights")
+
+ config = LiveConnectConfig(
+ response_modalities=[Modality.TEXT],
+ tools=[Tool(function_declarations=[turn_on_the_lights, turn_off_the_lights])],
+ )
+ async with client.aio.live.connect(model=model_id, config=config) as session:
+ text_input = "Turn on the lights please"
+ print("> ", text_input, "\n")
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ function_responses = []
+
+ async for chunk in session.receive():
+ if chunk.server_content:
+ if chunk.text is not None:
+ print(chunk.text)
+
+ elif chunk.tool_call:
+
+ for fc in chunk.tool_call.function_calls:
+ function_response = FunctionResponse(
+ name=fc.name,
+ response={
+ "result": "ok"
+ }, # simple, hard-coded function response
+ )
+ function_responses.append(function_response)
+ print(function_response.response["result"])
+
+ await session.send_tool_response(function_responses=function_responses)
+
+ # Example output:
+ # > Turn on the lights please
+ # ok
+ # [END googlegenaisdk_live_func_call_with_txt]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_ground_googsearch_with_txt.py b/genai/live/live_ground_googsearch_with_txt.py
new file mode 100644
index 00000000000..d160b286649
--- /dev/null
+++ b/genai/live/live_ground_googsearch_with_txt.py
@@ -0,0 +1,63 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import asyncio
+
+
+async def generate_content() -> list[str]:
+ # [START googlegenaisdk_live_ground_googsearch_with_txt]
+ from google import genai
+ from google.genai.types import (Content, GoogleSearch, LiveConnectConfig,
+ Modality, Part, Tool)
+
+ client = genai.Client()
+ model_id = "gemini-2.0-flash-live-preview-04-09"
+ config = LiveConnectConfig(
+ response_modalities=[Modality.TEXT],
+ tools=[Tool(google_search=GoogleSearch())],
+ )
+ async with client.aio.live.connect(model=model_id, config=config) as session:
+ text_input = "When did the last Brazil vs. Argentina soccer match happen?"
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ response = []
+
+ async for chunk in session.receive():
+ if chunk.server_content:
+ if chunk.text is not None:
+ response.append(chunk.text)
+
+ # The model might generate and execute Python code to use Search
+ model_turn = chunk.server_content.model_turn
+ if model_turn:
+ for part in model_turn.parts:
+ if part.executable_code is not None:
+ print(part.executable_code.code)
+
+ if part.code_execution_result is not None:
+ print(part.code_execution_result.output)
+
+ print("".join(response))
+ # Example output:
+ # > When did the last Brazil vs. Argentina soccer match happen?
+ # The last Brazil vs. Argentina soccer match was on March 25, 2025, a 2026 World Cup qualifier, where Argentina defeated Brazil 4-1.
+ # [END googlegenaisdk_live_ground_googsearch_with_txt]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_ground_ragengine_with_txt.py b/genai/live/live_ground_ragengine_with_txt.py
new file mode 100644
index 00000000000..09b133ad7cf
--- /dev/null
+++ b/genai/live/live_ground_ragengine_with_txt.py
@@ -0,0 +1,63 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import asyncio
+
+
+async def generate_content(memory_corpus: str) -> list[str]:
+ # [START googlegenaisdk_live_ground_ragengine_with_txt]
+ from google import genai
+ from google.genai.types import (Content, LiveConnectConfig, Modality, Part,
+ Retrieval, Tool, VertexRagStore,
+ VertexRagStoreRagResource)
+
+ client = genai.Client()
+ model_id = "gemini-2.0-flash-live-preview-04-09"
+ rag_store = VertexRagStore(
+ rag_resources=[
+ VertexRagStoreRagResource(
+ rag_corpus=memory_corpus # Use memory corpus if you want to store context.
+ )
+ ],
+ # Set `store_context` to true to allow Live API sink context into your memory corpus.
+ store_context=True,
+ )
+ config = LiveConnectConfig(
+ response_modalities=[Modality.TEXT],
+ tools=[Tool(retrieval=Retrieval(vertex_rag_store=rag_store))],
+ )
+
+ async with client.aio.live.connect(model=model_id, config=config) as session:
+ text_input = "What are newest gemini models?"
+ print("> ", text_input, "\n")
+
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
+
+ response = []
+
+ async for message in session.receive():
+ if message.text:
+ response.append(message.text)
+
+ print("".join(response))
+ # Example output:
+ # > What are newest gemini models?
+ # In December 2023, Google launched Gemini, their "most capable and general model". It's multimodal, meaning it understands and combines different types of information like text, code, audio, images, and video.
+ # [END googlegenaisdk_live_ground_ragengine_with_txt]
+ return response
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content("test_memory_corpus"))
diff --git a/genai/live/live_structured_output_with_txt.py b/genai/live/live_structured_output_with_txt.py
new file mode 100644
index 00000000000..2727fbcb08e
--- /dev/null
+++ b/genai/live/live_structured_output_with_txt.py
@@ -0,0 +1,86 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile
+
+from pydantic import BaseModel
+
+
+class CalendarEvent(BaseModel):
+ name: str
+ date: str
+ participants: list[str]
+
+
+def generate_content() -> CalendarEvent:
+ # [START googlegenaisdk_live_structured_output_with_txt]
+ import os
+
+ import google.auth.transport.requests
+ import openai
+ from google.auth import default
+ from openai.types.chat import (ChatCompletionSystemMessageParam,
+ ChatCompletionUserMessageParam)
+
+ project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
+ location = "us-central1"
+
+ # Programmatically get an access token
+ credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
+ credentials.refresh(google.auth.transport.requests.Request())
+ # Note: the credential lives for 1 hour by default (https://cloud.google.com/docs/authentication/token-types#at-lifetime); after expiration, it must be refreshed.
+
+ ##############################
+ # Choose one of the following:
+ ##############################
+
+ # If you are calling a Gemini model, set the ENDPOINT_ID variable to use openapi.
+ ENDPOINT_ID = "openapi"
+
+ # If you are calling a self-deployed model from Model Garden, set the
+ # ENDPOINT_ID variable and set the client's base URL to use your endpoint.
+ # ENDPOINT_ID = "YOUR_ENDPOINT_ID"
+
+ # OpenAI Client
+ client = openai.OpenAI(
+ base_url=f"https://{location}-aiplatform.googleapis.com/v1/projects/{project_id}/locations/{location}/endpoints/{ENDPOINT_ID}",
+ api_key=credentials.token,
+ )
+
+ completion = client.beta.chat.completions.parse(
+ model="google/gemini-2.5-flash",
+ messages=[
+ ChatCompletionSystemMessageParam(
+ role="system", content="Extract the event information."
+ ),
+ ChatCompletionUserMessageParam(
+ role="user",
+ content="Alice and Bob are going to a science fair on Friday.",
+ ),
+ ],
+ response_format=CalendarEvent,
+ )
+
+ response = completion.choices[0].message.parsed
+ print(response)
+
+ # System message: Extract the event information.
+ # User message: Alice and Bob are going to a science fair on Friday.
+ # Output message: name='science fair' date='Friday' participants=['Alice', 'Bob']
+ # [END googlegenaisdk_live_structured_output_with_txt]
+ return response
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/live/live_transcribe_with_audio.py b/genai/live/live_transcribe_with_audio.py
new file mode 100644
index 00000000000..4a6b185d7ce
--- /dev/null
+++ b/genai/live/live_transcribe_with_audio.py
@@ -0,0 +1,67 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile
+
+import asyncio
+
+
+async def generate_content() -> bool:
+ # [START googlegenaisdk_live_transcribe_with_audio]
+ from google import genai
+ from google.genai.types import (AudioTranscriptionConfig, Content,
+ LiveConnectConfig, Modality, Part)
+
+ client = genai.Client()
+ model = "gemini-live-2.5-flash-preview-native-audio"
+ config = LiveConnectConfig(
+ response_modalities=[Modality.AUDIO],
+ input_audio_transcription=AudioTranscriptionConfig(),
+ output_audio_transcription=AudioTranscriptionConfig(),
+ )
+
+ async with client.aio.live.connect(model=model, config=config) as session:
+ input_txt = "Hello? Gemini are you there?"
+ print(f"> {input_txt}")
+
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=input_txt)])
+ )
+
+ response = []
+
+ async for message in session.receive():
+ if message.server_content.model_turn:
+ print("Model turn:", message.server_content.model_turn)
+ if message.server_content.input_transcription:
+ print(
+ "Input transcript:", message.server_content.input_transcription.text
+ )
+ if message.server_content.output_transcription:
+ if message.server_content.output_transcription.text:
+ response.append(message.server_content.output_transcription.text)
+
+ print("".join(response))
+
+ # Example output:
+ # > Hello? Gemini are you there?
+ # Yes, I'm here. What would you like to talk about?
+ # [END googlegenaisdk_live_transcribe_with_audio]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_txt_with_audio.py b/genai/live/live_txt_with_audio.py
new file mode 100644
index 00000000000..30e9004d76f
--- /dev/null
+++ b/genai/live/live_txt_with_audio.py
@@ -0,0 +1,72 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile
+
+import asyncio
+
+
+async def generate_content() -> list[str]:
+ # [START googlegenaisdk_live_txt_with_audio]
+ import io
+
+ import librosa
+ import requests
+ import soundfile as sf
+ from google import genai
+ from google.genai.types import Blob, LiveConnectConfig, Modality
+
+ client = genai.Client()
+ model = "gemini-2.0-flash-live-preview-04-09"
+ config = LiveConnectConfig(response_modalities=[Modality.TEXT])
+
+ async with client.aio.live.connect(model=model, config=config) as session:
+ audio_url = (
+ "https://storage.googleapis.com/generativeai-downloads/data/16000.wav"
+ )
+ response = requests.get(audio_url)
+ response.raise_for_status()
+        source_buffer = io.BytesIO(response.content)
+        y, sr = librosa.load(source_buffer, sr=16000)
+        pcm_buffer = io.BytesIO()
+        sf.write(pcm_buffer, y, sr, format="RAW", subtype="PCM_16")
+        audio_bytes = pcm_buffer.getvalue()
+
+ # If you've pre-converted to sample.pcm using ffmpeg, use this instead:
+ # audio_bytes = Path("sample.pcm").read_bytes()
+
+ print("> Answer to this audio url", audio_url, "\n")
+
+ await session.send_realtime_input(
+ media=Blob(data=audio_bytes, mime_type="audio/pcm;rate=16000")
+ )
+
+ response = []
+
+ async for message in session.receive():
+ if message.text is not None:
+ response.append(message.text)
+
+ print("".join(response))
+ # Example output:
+ # > Answer to this audio url https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+ # Yes, I can hear you. How can I help you today?
+ # [END googlegenaisdk_live_txt_with_audio]
+ return response
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_txtgen_with_audio.py b/genai/live/live_txtgen_with_audio.py
new file mode 100644
index 00000000000..7daf4073a48
--- /dev/null
+++ b/genai/live/live_txtgen_with_audio.py
@@ -0,0 +1,78 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Test file: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+# Install helpers for converting files: pip install librosa soundfile
+
+import asyncio
+from pathlib import Path
+
+
+async def generate_content() -> bool:
+ # [START googlegenaisdk_live_txtgen_with_audio]
+ import requests
+ import soundfile as sf
+ from google import genai
+ from google.genai.types import Blob, LiveConnectConfig, Modality
+
+ client = genai.Client()
+ model = "gemini-2.0-flash-live-preview-04-09"
+ config = LiveConnectConfig(response_modalities=[Modality.TEXT])
+
+ def get_audio(url: str) -> bytes:
+ input_path = Path("temp_input.wav")
+ output_path = Path("temp_output.pcm")
+
+ input_path.write_bytes(requests.get(url).content)
+
+ y, sr = sf.read(input_path)
+ sf.write(output_path, y, sr, format="RAW", subtype="PCM_16")
+
+ audio = output_path.read_bytes()
+
+ input_path.unlink(missing_ok=True)
+ output_path.unlink(missing_ok=True)
+ return audio
+
+ async with client.aio.live.connect(model=model, config=config) as session:
+ audio_url = "https://storage.googleapis.com/generativeai-downloads/data/16000.wav"
+ audio_bytes = get_audio(audio_url)
+
+ # If you've pre-converted to sample.pcm using ffmpeg, use this instead:
+ # from pathlib import Path
+ # audio_bytes = Path("sample.pcm").read_bytes()
+
+ print("> Answer to this audio url", audio_url, "\n")
+
+ await session.send_realtime_input(
+ media=Blob(data=audio_bytes, mime_type="audio/pcm;rate=16000")
+ )
+
+ response = []
+
+ async for message in session.receive():
+ if message.text is not None:
+ response.append(message.text)
+
+ print("".join(response))
+ # Example output:
+ # > Answer to this audio url https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+ # Yes, I can hear you. How can I help you today?
+ # [END googlegenaisdk_live_txtgen_with_audio]
+ return True
+
+
+if __name__ == "__main__":
+ asyncio.run(generate_content())
diff --git a/genai/live/live_websocket_audiogen_with_txt.py b/genai/live/live_websocket_audiogen_with_txt.py
index f7b6f07e5f8..d81c685cf0e 100644
--- a/genai/live/live_websocket_audiogen_with_txt.py
+++ b/genai/live/live_websocket_audiogen_with_txt.py
@@ -39,15 +39,15 @@ async def generate_content() -> str:
# [START googlegenaisdk_live_audiogen_websocket_with_txt]
import base64
import json
- import numpy as np
- from websockets.asyncio.client import connect
+ import numpy as np
from scipy.io import wavfile
+ from websockets.asyncio.client import connect
# Configuration Constants
PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")
LOCATION = "us-central1"
- GEMINI_MODEL_NAME = "gemini-2.0-flash-live-preview-04-09"
+ GEMINI_MODEL_NAME = "gemini-live-2.5-flash-native-audio"
# To generate a bearer token in CLI, use:
# $ gcloud auth application-default print-access-token
# It's recommended to fetch this token dynamically rather than hardcoding.
@@ -143,7 +143,7 @@ async def generate_content() -> str:
# Input: Hello? Gemini are you there?
# Audio Response: Hello there. I'm here. What can I do for you today?
# [END googlegenaisdk_live_audiogen_websocket_with_txt]
- return "output.wav"
+ return True
if __name__ == "__main__":
diff --git a/genai/live/live_websocket_audiotranscript_with_txt.py b/genai/live/live_websocket_audiotranscript_with_txt.py
index 5192b81ef17..8b6ce59fb79 100644
--- a/genai/live/live_websocket_audiotranscript_with_txt.py
+++ b/genai/live/live_websocket_audiotranscript_with_txt.py
@@ -39,15 +39,15 @@ async def generate_content() -> str:
# [START googlegenaisdk_live_websocket_audiotranscript_with_txt]
import base64
import json
- import numpy as np
- from websockets.asyncio.client import connect
+ import numpy as np
from scipy.io import wavfile
+ from websockets.asyncio.client import connect
# Configuration Constants
PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")
LOCATION = "us-central1"
- GEMINI_MODEL_NAME = "gemini-2.0-flash-live-preview-04-09"
+ GEMINI_MODEL_NAME = "gemini-live-2.5-flash-native-audio"
# To generate a bearer token in CLI, use:
# $ gcloud auth application-default print-access-token
# It's recommended to fetch this token dynamically rather than hardcoding.
@@ -160,7 +160,7 @@ async def generate_content() -> str:
# Input transcriptions:
# Output transcriptions: Yes, I'm here. How can I help you today?
# [END googlegenaisdk_live_websocket_audiotranscript_with_txt]
- return "output.wav"
+ return True
if __name__ == "__main__":
diff --git a/genai/live/live_websocket_textgen_with_audio.py b/genai/live/live_websocket_textgen_with_audio.py
index de6fd9d55c3..781ffc96d78 100644
--- a/genai/live/live_websocket_textgen_with_audio.py
+++ b/genai/live/live_websocket_textgen_with_audio.py
@@ -40,8 +40,8 @@ async def generate_content() -> str:
import base64
import json
- from websockets.asyncio.client import connect
from scipy.io import wavfile
+ from websockets.asyncio.client import connect
def read_wavefile(filepath: str) -> tuple[str, str]:
# Read the .wav file using scipy.io.wavfile.read
@@ -154,7 +154,7 @@ def read_wavefile(filepath: str) -> tuple[str, str]:
# Setup Response: {'setupComplete': {}}
# Response: Hey there. What's on your mind today?
# [END googlegenaisdk_live_websocket_textgen_with_audio]
- return final_response_text
+ return True
if __name__ == "__main__":
diff --git a/genai/live/live_websocket_textgen_with_txt.py b/genai/live/live_websocket_textgen_with_txt.py
index b36487cc9a0..13515b30062 100644
--- a/genai/live/live_websocket_textgen_with_txt.py
+++ b/genai/live/live_websocket_textgen_with_txt.py
@@ -38,6 +38,7 @@ async def generate_content() -> str:
"""
# [START googlegenaisdk_live_websocket_with_txt]
import json
+
from websockets.asyncio.client import connect
# Configuration Constants
@@ -129,7 +130,7 @@ async def generate_content() -> str:
# Input: Hello? Gemini are you there?
# Response: Hello there. I'm here. What can I do for you today?
# [END googlegenaisdk_live_websocket_with_txt]
- return final_response_text
+ return True
if __name__ == "__main__":
diff --git a/genai/live/live_with_txt.py b/genai/live/live_with_txt.py
index a3c75188439..78df0ccd700 100644
--- a/genai/live/live_with_txt.py
+++ b/genai/live/live_with_txt.py
@@ -18,13 +18,8 @@
async def generate_content() -> list[str]:
# [START googlegenaisdk_live_with_txt]
from google import genai
- from google.genai.types import (
- Content,
- LiveConnectConfig,
- HttpOptions,
- Modality,
- Part,
- )
+ from google.genai.types import (Content, HttpOptions, LiveConnectConfig,
+ Modality, Part)
client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
model_id = "gemini-2.0-flash-live-preview-04-09"
@@ -35,7 +30,9 @@ async def generate_content() -> list[str]:
) as session:
text_input = "Hello? Gemini, are you there?"
print("> ", text_input, "\n")
- await session.send_client_content(turns=Content(role="user", parts=[Part(text=text_input)]))
+ await session.send_client_content(
+ turns=Content(role="user", parts=[Part(text=text_input)])
+ )
response = []
@@ -48,7 +45,7 @@ async def generate_content() -> list[str]:
# > Hello? Gemini, are you there?
# Yes, I'm here. What would you like to talk about?
# [END googlegenaisdk_live_with_txt]
- return response
+ return True
if __name__ == "__main__":
diff --git a/genai/live/requirements-test.txt b/genai/live/requirements-test.txt
index 4fb57f7f08d..7d5998c481d 100644
--- a/genai/live/requirements-test.txt
+++ b/genai/live/requirements-test.txt
@@ -1,4 +1,5 @@
backoff==2.2.1
-google-api-core==2.19.0
-pytest==8.2.0
-pytest-asyncio==0.25.3
+google-api-core==2.25.1
+pytest==8.4.1
+pytest-asyncio==1.1.0
+pytest-mock==3.14.0
\ No newline at end of file
diff --git a/genai/live/requirements.txt b/genai/live/requirements.txt
index eaaeffe525c..ee7f068754b 100644
--- a/genai/live/requirements.txt
+++ b/genai/live/requirements.txt
@@ -1,3 +1,10 @@
-google-genai==1.19.0
-scipy==1.15.3
-websockets==15.0.1
\ No newline at end of file
+google-genai==1.42.0
+scipy==1.16.1
+websockets==15.0.1
+numpy==1.26.4
+soundfile==0.12.1
+openai==1.99.1
+setuptools==80.9.0
+pyaudio==0.2.14
+librosa==0.11.0
+simpleaudio==1.0.0
\ No newline at end of file
diff --git a/genai/live/test_live_examples.py b/genai/live/test_live_examples.py
index ce382539861..ffb0f10c689 100644
--- a/genai/live/test_live_examples.py
+++ b/genai/live/test_live_examples.py
@@ -15,31 +15,181 @@
#
# Using Google Cloud Vertex AI to test the code samples.
#
-
+import base64
import os
+import sys
+import types
+
+from unittest.mock import AsyncMock, MagicMock, patch
import pytest
+import pytest_mock
+import live_audio_with_txt
+import live_audiogen_with_txt
+import live_code_exec_with_txt
+import live_func_call_with_txt
+import live_ground_googsearch_with_txt
+import live_ground_ragengine_with_txt
+import live_structured_output_with_txt
+import live_transcribe_with_audio
+import live_txt_with_audio
+import live_txtgen_with_audio
import live_websocket_audiogen_with_txt
import live_websocket_audiotranscript_with_txt
-import live_websocket_textgen_with_audio
+# import live_websocket_textgen_with_audio
import live_websocket_textgen_with_txt
import live_with_txt
+
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
# The project name is included in the CICD pipeline
# os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"
+@pytest.fixture
+def mock_live_session() -> tuple[MagicMock, MagicMock]:
+ async def async_gen(items: list) -> AsyncMock:
+ for i in items:
+ yield i
+
+ mock_session = MagicMock()
+ mock_session.__aenter__.return_value = mock_session
+ mock_session.send_client_content = AsyncMock()
+ mock_session.send = AsyncMock()
+ mock_session.receive = lambda: async_gen([])
+
+ mock_client = MagicMock()
+ mock_client.aio.live.connect.return_value = mock_session
+
+ return mock_client, mock_session
+
+
+@pytest.fixture()
+def mock_rag_components(mocker: pytest_mock.MockerFixture) -> None:
+ mock_client_cls = mocker.patch("google.genai.Client")
+
+ class AsyncIterator:
+ def __init__(self) -> None:
+ self.used = False
+
+ def __aiter__(self) -> "AsyncIterator":
+ return self
+
+ async def __anext__(self) -> object:
+ if not self.used:
+ self.used = True
+ return mocker.MagicMock(
+ text="""In December 2023, Google launched Gemini, their "most capable and general model". It's multimodal, meaning it understands and combines different types of information like text, code, audio, images, and video."""
+ )
+ raise StopAsyncIteration
+
+ mock_session = mocker.AsyncMock()
+ mock_session.__aenter__.return_value = mock_session
+ mock_session.receive = lambda: AsyncIterator()
+ mock_client_cls.return_value.aio.live.connect.return_value = mock_session
+
+
+@pytest.fixture()
+def live_conversation() -> None:
+ google_mod = types.ModuleType("google")
+ genai_mod = types.ModuleType("google.genai")
+ genai_types_mod = types.ModuleType("google.genai.types")
+
+ class AudioTranscriptionConfig:
+ def __init__(self, *args: object, **kwargs: object) -> None:
+ pass
+
+ class Blob:
+ def __init__(self, data: bytes, mime_type: str) -> None:
+ self.data = data
+ self.mime_type = mime_type
+
+ class HttpOptions:
+ def __init__(self, api_version: str | None = None) -> None:
+ self.api_version = api_version
+
+ class LiveConnectConfig:
+ def __init__(self, *args: object, **kwargs: object) -> None:
+ self.kwargs = kwargs
+
+ class Modality:
+ AUDIO = "AUDIO"
+
+ genai_types_mod.AudioTranscriptionConfig = AudioTranscriptionConfig
+ genai_types_mod.Blob = Blob
+ genai_types_mod.HttpOptions = HttpOptions
+ genai_types_mod.LiveConnectConfig = LiveConnectConfig
+ genai_types_mod.Modality = Modality
+
+ class FakeSession:
+ async def __aenter__(self) -> "FakeSession":
+ print("MOCK: entering FakeSession")
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc: BaseException | None,
+ tb: types.TracebackType | None,
+ ) -> None:
+ print("MOCK: exiting FakeSession")
+
+ async def send_realtime_input(self, media: object) -> None:
+ print("MOCK: send_realtime_input called (no network)")
+
+ async def receive(self) -> object:
+ print("MOCK: receive started")
+ if False:
+ yield
+
+ class FakeClient:
+ def __init__(self, *args: object, **kwargs: object) -> None:
+ self.aio = MagicMock()
+ self.aio.live = MagicMock()
+ self.aio.live.connect = MagicMock(return_value=FakeSession())
+ print("MOCK: FakeClient created")
+
+ def fake_client_constructor(*args: object, **kwargs: object) -> FakeClient:
+ return FakeClient()
+
+ genai_mod.Client = fake_client_constructor
+ genai_mod.types = genai_types_mod
+
+ old_modules = sys.modules.copy()
+
+ sys.modules["google"] = google_mod
+ sys.modules["google.genai"] = genai_mod
+ sys.modules["google.genai.types"] = genai_types_mod
+
+ import live_conversation_audio_with_audio as live
+
+ def fake_read_wavefile(path: str) -> tuple[str, str]:
+ print("MOCK: read_wavefile called")
+ fake_bytes = b"\x00\x00" * 1000
+ return base64.b64encode(fake_bytes).decode("ascii"), "audio/pcm;rate=16000"
+
+ def fake_write_wavefile(path: str, frames: bytes, rate: int) -> None:
+ print(f"MOCK: write_wavefile called (no file written) rate={rate}")
+
+ live.read_wavefile = fake_read_wavefile
+ live.write_wavefile = fake_write_wavefile
+
+ yield live
+
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+
+
@pytest.mark.asyncio
async def test_live_with_text() -> None:
assert await live_with_txt.generate_content()
-@pytest.mark.asyncio
-async def test_live_websocket_textgen_with_audio() -> None:
- assert await live_websocket_textgen_with_audio.generate_content()
+# @pytest.mark.asyncio
+# async def test_live_websocket_textgen_with_audio() -> None:
+# assert await live_websocket_textgen_with_audio.generate_content()
@pytest.mark.asyncio
@@ -55,3 +205,68 @@ async def test_live_websocket_audiogen_with_txt() -> None:
@pytest.mark.asyncio
async def test_live_websocket_audiotranscript_with_txt() -> None:
assert await live_websocket_audiotranscript_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_audiogen_with_txt() -> None:
+ assert live_audiogen_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_code_exec_with_txt() -> None:
+ assert await live_code_exec_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_func_call_with_txt() -> None:
+ assert await live_func_call_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_ground_googsearch_with_txt() -> None:
+ assert await live_ground_googsearch_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_transcribe_with_audio() -> None:
+ assert await live_transcribe_with_audio.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_txtgen_with_audio() -> None:
+ assert await live_txtgen_with_audio.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_structured_output_with_txt() -> None:
+ assert live_structured_output_with_txt.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_ground_ragengine_with_txt(mock_rag_components: None) -> None:
+ assert await live_ground_ragengine_with_txt.generate_content("test")
+
+
+@pytest.mark.asyncio
+async def test_live_txt_with_audio() -> None:
+ assert await live_txt_with_audio.generate_content()
+
+
+@pytest.mark.asyncio
+async def test_live_audio_with_txt(mock_live_session: tuple[MagicMock, MagicMock]) -> None:
+ mock_client, mock_session = mock_live_session
+
+ with patch("google.genai.Client", return_value=mock_client):
+ with patch("simpleaudio.WaveObject.from_wave_file") as mock_wave:
+ with patch("soundfile.write"):
+ mock_wave_obj = mock_wave.return_value
+ mock_wave_obj.play.return_value = MagicMock()
+ result = await live_audio_with_txt.generate_content()
+
+ assert result is not None
+
+
+@pytest.mark.asyncio
+async def test_live_conversation_audio_with_audio(live_conversation: types.ModuleType) -> None:
+ result = await live_conversation.main()
+ assert result is True or result is None
diff --git a/genai/model_optimizer/requirements.txt b/genai/model_optimizer/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/model_optimizer/requirements.txt
+++ b/genai/model_optimizer/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/provisioned_throughput/provisionedthroughput_with_txt.py b/genai/provisioned_throughput/provisionedthroughput_with_txt.py
index 54a4602e606..a85362ee6d8 100644
--- a/genai/provisioned_throughput/provisionedthroughput_with_txt.py
+++ b/genai/provisioned_throughput/provisionedthroughput_with_txt.py
@@ -31,7 +31,7 @@ def generate_content() -> str:
)
)
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="How does AI work?",
)
print(response.text)
diff --git a/genai/provisioned_throughput/requirements.txt b/genai/provisioned_throughput/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/provisioned_throughput/requirements.txt
+++ b/genai/provisioned_throughput/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/safety/requirements.txt b/genai/safety/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/safety/requirements.txt
+++ b/genai/safety/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/safety/safety_with_txt.py b/genai/safety/safety_with_txt.py
index dd1cc469acc..308a45cb154 100644
--- a/genai/safety/safety_with_txt.py
+++ b/genai/safety/safety_with_txt.py
@@ -54,7 +54,7 @@ def generate_content() -> GenerateContentResponse:
]
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=prompt,
config=GenerateContentConfig(
system_instruction=system_instruction,
diff --git a/genai/template_folder/requirements.txt b/genai/template_folder/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/template_folder/requirements.txt
+++ b/genai/template_folder/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/text_generation/model_optimizer_textgen_with_txt.py b/genai/text_generation/model_optimizer_textgen_with_txt.py
index b353ce2e836..adc4551cdca 100644
--- a/genai/text_generation/model_optimizer_textgen_with_txt.py
+++ b/genai/text_generation/model_optimizer_textgen_with_txt.py
@@ -1,49 +1,49 @@
-# Copyright 2025 Google LLC
+# # Copyright 2025 Google LLC
+# #
+# # Licensed under the Apache License, Version 2.0 (the "License");
+# # you may not use this file except in compliance with the License.
+# # You may obtain a copy of the License at
+# #
+# # https://www.apache.org/licenses/LICENSE-2.0
+# #
+# # Unless required by applicable law or agreed to in writing, software
+# # distributed under the License is distributed on an "AS IS" BASIS,
+# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# # See the License for the specific language governing permissions and
+# # limitations under the License.
#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# # TODO: Migrate model_optimizer samples to /model_optimizer
+# # and deprecate following sample
+# def generate_content() -> str:
+# # [START googlegenaisdk_model_optimizer_textgen_with_txt]
+# from google import genai
+# from google.genai.types import (
+# FeatureSelectionPreference,
+# GenerateContentConfig,
+# HttpOptions,
+# ModelSelectionConfig
+# )
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# TODO: Migrate model_optimizer samples to /model_optimizer
-# and deprecate following sample
-def generate_content() -> str:
- # [START googlegenaisdk_model_optimizer_textgen_with_txt]
- from google import genai
- from google.genai.types import (
- FeatureSelectionPreference,
- GenerateContentConfig,
- HttpOptions,
- ModelSelectionConfig
- )
-
- client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
- response = client.models.generate_content(
- model="model-optimizer-exp-04-09",
- contents="How does AI work?",
- config=GenerateContentConfig(
- model_selection_config=ModelSelectionConfig(
- feature_selection_preference=FeatureSelectionPreference.BALANCED # Options: PRIORITIZE_QUALITY, BALANCED, PRIORITIZE_COST
- ),
- ),
- )
- print(response.text)
- # Example response:
- # Okay, let's break down how AI works. It's a broad field, so I'll focus on the ...
- #
- # Here's a simplified overview:
- # ...
- # [END googlegenaisdk_model_optimizer_textgen_with_txt]
- return response.text
-
-
-if __name__ == "__main__":
- generate_content()
+# client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+# response = client.models.generate_content(
+# model="model-optimizer-exp-04-09",
+# contents="How does AI work?",
+# config=GenerateContentConfig(
+# model_selection_config=ModelSelectionConfig(
+# feature_selection_preference=FeatureSelectionPreference.BALANCED # Options: PRIORITIZE_QUALITY, BALANCED, PRIORITIZE_COST
+# ),
+# ),
+# )
+# print(response.text)
+# # Example response:
+# # Okay, let's break down how AI works. It's a broad field, so I'll focus on the ...
+# #
+# # Here's a simplified overview:
+# # ...
+# # [END googlegenaisdk_model_optimizer_textgen_with_txt]
+# return response.text
+#
+#
+# if __name__ == "__main__":
+# generate_content()
diff --git a/genai/text_generation/requirements.txt b/genai/text_generation/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/text_generation/requirements.txt
+++ b/genai/text_generation/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/text_generation/test_text_generation_examples.py b/genai/text_generation/test_text_generation_examples.py
index eefc15111c5..3477caef9df 100644
--- a/genai/text_generation/test_text_generation_examples.py
+++ b/genai/text_generation/test_text_generation_examples.py
@@ -18,10 +18,11 @@
import os
-import model_optimizer_textgen_with_txt
+# import model_optimizer_textgen_with_txt
import textgen_async_with_txt
import textgen_chat_stream_with_txt
import textgen_chat_with_txt
+import textgen_code_with_pdf
import textgen_config_with_txt
import textgen_sys_instr_with_txt
import textgen_transcript_with_gcs_audio
@@ -137,8 +138,13 @@ def test_textgen_with_youtube_video() -> None:
assert response
-def test_model_optimizer_textgen_with_txt() -> None:
- os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
- response = model_optimizer_textgen_with_txt.generate_content()
- os.environ["GOOGLE_CLOUD_LOCATION"] = "global" # "us-central1"
+def test_textgen_code_with_pdf() -> None:
+ response = textgen_code_with_pdf.generate_content()
assert response
+
+# Migrated to Model Optimizer folder
+# def test_model_optimizer_textgen_with_txt() -> None:
+# os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+# response = model_optimizer_textgen_with_txt.generate_content()
+# os.environ["GOOGLE_CLOUD_LOCATION"] = "global" # "us-central1"
+# assert response
diff --git a/genai/text_generation/textgen_async_with_txt.py b/genai/text_generation/textgen_async_with_txt.py
index 91c193098c3..ccbb5cdc443 100644
--- a/genai/text_generation/textgen_async_with_txt.py
+++ b/genai/text_generation/textgen_async_with_txt.py
@@ -21,7 +21,7 @@ async def generate_content() -> str:
from google.genai.types import GenerateContentConfig, HttpOptions
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.5-flash-preview-05-20"
+ model_id = "gemini-2.5-flash"
response = await client.aio.models.generate_content(
model=model_id,
diff --git a/genai/text_generation/textgen_chat_stream_with_txt.py b/genai/text_generation/textgen_chat_stream_with_txt.py
index 3f49f31df38..d5a5cf9b6c6 100644
--- a/genai/text_generation/textgen_chat_stream_with_txt.py
+++ b/genai/text_generation/textgen_chat_stream_with_txt.py
@@ -19,7 +19,7 @@ def generate_content() -> bool:
from google.genai.types import HttpOptions
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- chat_session = client.chats.create(model="gemini-2.5-flash-preview-05-20")
+ chat_session = client.chats.create(model="gemini-2.5-flash")
for chunk in chat_session.send_message_stream("Why is the sky blue?"):
print(chunk.text, end="")
diff --git a/genai/text_generation/textgen_chat_with_txt.py b/genai/text_generation/textgen_chat_with_txt.py
index 74a63972220..0b1bc928e0c 100644
--- a/genai/text_generation/textgen_chat_with_txt.py
+++ b/genai/text_generation/textgen_chat_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
chat_session = client.chats.create(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
history=[
UserContent(parts=[Part(text="Hello")]),
ModelContent(
diff --git a/genai/text_generation/textgen_code_with_pdf.py b/genai/text_generation/textgen_code_with_pdf.py
new file mode 100644
index 00000000000..da4ca76b73a
--- /dev/null
+++ b/genai/text_generation/textgen_code_with_pdf.py
@@ -0,0 +1,55 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# !This sample works with Google Cloud Vertex AI API only.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_textgen_code_with_pdf]
+ from google import genai
+ from google.genai.types import HttpOptions, Part
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+ model_id = "gemini-2.5-flash"
+ prompt = "Convert this python code to use Google Python Style Guide."
+ print("> ", prompt, "\n")
+ pdf_uri = "https://storage.googleapis.com/cloud-samples-data/generative-ai/text/inefficient_fibonacci_series_python_code.pdf"
+
+ pdf_file = Part.from_uri(
+ file_uri=pdf_uri,
+ mime_type="application/pdf",
+ )
+
+ response = client.models.generate_content(
+ model=model_id,
+ contents=[pdf_file, prompt],
+ )
+
+ print(response.text)
+ # Example response:
+ # > Convert this python code to use Google Python Style Guide.
+ #
+ # def generate_fibonacci_sequence(num_terms: int) -> list[int]:
+ # """Generates the Fibonacci sequence up to a specified number of terms.
+ #
+ # This function calculates the Fibonacci sequence starting with 0 and 1.
+ # It handles base cases for 0, 1, and 2 terms efficiently.
+ #
+ # # ...
+ # [END googlegenaisdk_textgen_code_with_pdf]
+ return response.text
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/text_generation/textgen_config_with_txt.py b/genai/text_generation/textgen_config_with_txt.py
index d71f8a0dc55..0a54b2cb5ab 100644
--- a/genai/text_generation/textgen_config_with_txt.py
+++ b/genai/text_generation/textgen_config_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Why is the sky blue?",
# See the SDK documentation at
# https://googleapis.github.io/python-genai/genai.html#genai.types.GenerateContentConfig
@@ -31,7 +31,7 @@ def generate_content() -> str:
top_p=0.95,
top_k=20,
seed=5,
- max_output_tokens=100,
+ max_output_tokens=500,
stop_sequences=["STOP!"],
presence_penalty=0.0,
frequency_penalty=0.0,
diff --git a/genai/text_generation/textgen_sys_instr_with_txt.py b/genai/text_generation/textgen_sys_instr_with_txt.py
index ac832efa270..1bdd3d74128 100644
--- a/genai/text_generation/textgen_sys_instr_with_txt.py
+++ b/genai/text_generation/textgen_sys_instr_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Why is the sky blue?",
config=GenerateContentConfig(
system_instruction=[
diff --git a/genai/text_generation/textgen_transcript_with_gcs_audio.py b/genai/text_generation/textgen_transcript_with_gcs_audio.py
index 32bf8a8215a..1cac5ee4bef 100644
--- a/genai/text_generation/textgen_transcript_with_gcs_audio.py
+++ b/genai/text_generation/textgen_transcript_with_gcs_audio.py
@@ -24,7 +24,7 @@ def generate_content() -> str:
Use speaker A, speaker B, etc. to identify speakers.
"""
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
prompt,
Part.from_uri(
diff --git a/genai/text_generation/textgen_with_gcs_audio.py b/genai/text_generation/textgen_with_gcs_audio.py
index ded99a9b40d..f65818dc652 100644
--- a/genai/text_generation/textgen_with_gcs_audio.py
+++ b/genai/text_generation/textgen_with_gcs_audio.py
@@ -23,7 +23,7 @@ def generate_content() -> str:
Provide a concise summary of the main points in the audio file.
"""
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
prompt,
Part.from_uri(
diff --git a/genai/text_generation/textgen_with_local_video.py b/genai/text_generation/textgen_with_local_video.py
index 968497499e7..be1b1a7ad9c 100644
--- a/genai/text_generation/textgen_with_local_video.py
+++ b/genai/text_generation/textgen_with_local_video.py
@@ -19,7 +19,7 @@ def generate_content() -> str:
from google.genai.types import HttpOptions, Part
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.5-flash-preview-05-20"
+ model_id = "gemini-2.5-flash"
# Read local video file content
with open("test_data/describe_video_content.mp4", "rb") as fp:
diff --git a/genai/text_generation/textgen_with_multi_img.py b/genai/text_generation/textgen_with_multi_img.py
index aa92741dadd..71b617baf71 100644
--- a/genai/text_generation/textgen_with_multi_img.py
+++ b/genai/text_generation/textgen_with_multi_img.py
@@ -28,7 +28,7 @@ def generate_content() -> str:
local_file_img_bytes = f.read()
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
"Generate a list of all the objects contained in both images.",
Part.from_uri(file_uri=gcs_file_img_path, mime_type="image/jpeg"),
diff --git a/genai/text_generation/textgen_with_multi_local_img.py b/genai/text_generation/textgen_with_multi_local_img.py
index cd21fb9f486..9419c186bdd 100644
--- a/genai/text_generation/textgen_with_multi_local_img.py
+++ b/genai/text_generation/textgen_with_multi_local_img.py
@@ -28,7 +28,7 @@ def generate_content(image_path_1: str, image_path_2: str) -> str:
image_2_bytes = f.read()
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
"Generate a list of all the objects contained in both images.",
Part.from_bytes(data=image_1_bytes, mime_type="image/jpeg"),
diff --git a/genai/text_generation/textgen_with_mute_video.py b/genai/text_generation/textgen_with_mute_video.py
index 1744daf06a8..1c644c94ead 100644
--- a/genai/text_generation/textgen_with_mute_video.py
+++ b/genai/text_generation/textgen_with_mute_video.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
Part.from_uri(
file_uri="gs://cloud-samples-data/generative-ai/video/ad_copy_from_video.mp4",
diff --git a/genai/text_generation/textgen_with_pdf.py b/genai/text_generation/textgen_with_pdf.py
index 5e591f6aefa..31de8b5e46c 100644
--- a/genai/text_generation/textgen_with_pdf.py
+++ b/genai/text_generation/textgen_with_pdf.py
@@ -21,7 +21,7 @@ def generate_content() -> str:
from google.genai.types import HttpOptions, Part
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.5-flash-preview-05-20"
+ model_id = "gemini-2.5-flash"
prompt = """
You are a highly skilled document summarization specialist.
diff --git a/genai/text_generation/textgen_with_txt.py b/genai/text_generation/textgen_with_txt.py
index bbca543ed3a..c2e4a879f02 100644
--- a/genai/text_generation/textgen_with_txt.py
+++ b/genai/text_generation/textgen_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="How does AI work?",
)
print(response.text)
diff --git a/genai/text_generation/textgen_with_txt_img.py b/genai/text_generation/textgen_with_txt_img.py
index 51cdb55f031..99d2bc87e96 100644
--- a/genai/text_generation/textgen_with_txt_img.py
+++ b/genai/text_generation/textgen_with_txt_img.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
"What is shown in this image?",
Part.from_uri(
diff --git a/genai/text_generation/textgen_with_txt_stream.py b/genai/text_generation/textgen_with_txt_stream.py
index 3d353a79ed3..30ce428c4f8 100644
--- a/genai/text_generation/textgen_with_txt_stream.py
+++ b/genai/text_generation/textgen_with_txt_stream.py
@@ -21,7 +21,7 @@ def generate_content() -> bool:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
for chunk in client.models.generate_content_stream(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Why is the sky blue?",
):
print(chunk.text, end="")
diff --git a/genai/text_generation/textgen_with_video.py b/genai/text_generation/textgen_with_video.py
index bf55712f701..7cd4cc97d15 100644
--- a/genai/text_generation/textgen_with_video.py
+++ b/genai/text_generation/textgen_with_video.py
@@ -25,7 +25,7 @@ def generate_content() -> str:
Create a chapter breakdown with timestamps for key sections or topics discussed.
"""
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[
Part.from_uri(
file_uri="gs://cloud-samples-data/generative-ai/video/pixel8.mp4",
diff --git a/genai/text_generation/textgen_with_youtube_video.py b/genai/text_generation/textgen_with_youtube_video.py
index 12ca844c9af..26eaddcce62 100644
--- a/genai/text_generation/textgen_with_youtube_video.py
+++ b/genai/text_generation/textgen_with_youtube_video.py
@@ -21,7 +21,7 @@ def generate_content() -> str:
from google.genai.types import HttpOptions, Part
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.5-flash-preview-05-20"
+ model_id = "gemini-2.5-flash"
response = client.models.generate_content(
model=model_id,
diff --git a/genai/text_generation/thinking_textgen_with_txt.py b/genai/text_generation/thinking_textgen_with_txt.py
index 87f7b6902ef..00f72e919e3 100644
--- a/genai/text_generation/thinking_textgen_with_txt.py
+++ b/genai/text_generation/thinking_textgen_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.5-pro-preview-05-06",
+ model="gemini-2.5-pro",
contents="solve x^2 + 4x + 4 = 0",
)
print(response.text)
diff --git a/genai/thinking/requirements.txt b/genai/thinking/requirements.txt
index 7890f90e26a..1efe7b29dbc 100644
--- a/genai/thinking/requirements.txt
+++ b/genai/thinking/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.42.0
diff --git a/genai/thinking/thinking_budget_with_txt.py b/genai/thinking/thinking_budget_with_txt.py
index ef261456c6d..5e8bc3cba27 100644
--- a/genai/thinking/thinking_budget_with_txt.py
+++ b/genai/thinking/thinking_budget_with_txt.py
@@ -21,7 +21,7 @@ def generate_content() -> str:
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="solve x^2 + 4x + 4 = 0",
config=GenerateContentConfig(
thinking_config=ThinkingConfig(
diff --git a/genai/thinking/thinking_includethoughts_with_txt.py b/genai/thinking/thinking_includethoughts_with_txt.py
index bf183f26f55..0eafd71b24a 100644
--- a/genai/thinking/thinking_includethoughts_with_txt.py
+++ b/genai/thinking/thinking_includethoughts_with_txt.py
@@ -20,7 +20,7 @@ def generate_content() -> str:
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.5-pro-preview-05-06",
+ model="gemini-2.5-pro",
contents="solve x^2 + 4x + 4 = 0",
config=GenerateContentConfig(
thinking_config=ThinkingConfig(include_thoughts=True)
diff --git a/genai/thinking/thinking_with_txt.py b/genai/thinking/thinking_with_txt.py
index f6002ef503b..0eccf44b93a 100644
--- a/genai/thinking/thinking_with_txt.py
+++ b/genai/thinking/thinking_with_txt.py
@@ -19,7 +19,7 @@ def generate_content() -> str:
client = genai.Client()
response = client.models.generate_content(
- model="gemini-2.5-pro-preview-05-06",
+ model="gemini-2.5-pro",
contents="solve x^2 + 4x + 4 = 0",
)
print(response.text)
diff --git a/genai/tools/requirements.txt b/genai/tools/requirements.txt
index fd1b338d41a..9f6fafbe8ec 100644
--- a/genai/tools/requirements.txt
+++ b/genai/tools/requirements.txt
@@ -1,3 +1,3 @@
-google-genai==1.18.0
+google-genai==1.45.0
# PIl is required for tools_code_execution_with_txt_img.py
pillow==11.1.0
diff --git a/genai/tools/test_tools_examples.py b/genai/tools/test_tools_examples.py
index 26e5eb8ff5d..60ed069e1a4 100644
--- a/genai/tools/test_tools_examples.py
+++ b/genai/tools/test_tools_examples.py
@@ -24,8 +24,11 @@
import tools_enterprise_web_search_with_txt
import tools_func_def_with_txt
import tools_func_desc_with_txt
+import tools_google_maps_coordinates_with_txt
import tools_google_maps_with_txt
+import tools_google_search_and_urlcontext_with_txt
import tools_google_search_with_txt
+import tools_urlcontext_with_txt
import tools_vais_with_txt
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
@@ -35,45 +38,49 @@
def test_tools_code_exec_with_txt() -> None:
- response = tools_code_exec_with_txt.generate_content()
- assert response
+ assert tools_code_exec_with_txt.generate_content()
def test_tools_code_exec_with_txt_local_img() -> None:
- response = tools_code_exec_with_txt_local_img.generate_content()
- assert response
+ assert tools_code_exec_with_txt_local_img.generate_content()
def test_tools_enterprise_web_search_with_txt() -> None:
- response = tools_enterprise_web_search_with_txt.generate_content()
- assert response
+ assert tools_enterprise_web_search_with_txt.generate_content()
def test_tools_func_def_with_txt() -> None:
- response = tools_func_def_with_txt.generate_content()
- assert response
+ assert tools_func_def_with_txt.generate_content()
def test_tools_func_desc_with_txt() -> None:
- response = tools_func_desc_with_txt.generate_content()
- assert response
+ assert tools_func_desc_with_txt.generate_content()
@pytest.mark.skip(
reason="Google Maps Grounding allowlisting is not set up for the test project."
)
def test_tools_google_maps_with_txt() -> None:
- response = tools_google_maps_with_txt.generate_content()
- assert response
+ assert tools_google_maps_with_txt.generate_content()
def test_tools_google_search_with_txt() -> None:
- response = tools_google_search_with_txt.generate_content()
- assert response
+ assert tools_google_search_with_txt.generate_content()
def test_tools_vais_with_txt() -> None:
PROJECT_ID = os.environ.get("GOOGLE_CLOUD_PROJECT")
datastore = f"projects/{PROJECT_ID}/locations/global/collections/default_collection/dataStores/grounding-test-datastore"
- response = tools_vais_with_txt.generate_content(datastore)
- assert response
+ assert tools_vais_with_txt.generate_content(datastore)
+
+
+def test_tools_google_maps_coordinates_with_txt() -> None:
+ assert tools_google_maps_coordinates_with_txt.generate_content()
+
+
+def test_tools_urlcontext_with_txt() -> None:
+ assert tools_urlcontext_with_txt.generate_content()
+
+
+def test_tools_google_search_and_urlcontext_with_txt() -> None:
+ assert tools_google_search_and_urlcontext_with_txt.generate_content()
diff --git a/genai/tools/tools_code_exec_with_txt.py b/genai/tools/tools_code_exec_with_txt.py
index 53e6afd427e..a97cd913446 100644
--- a/genai/tools/tools_code_exec_with_txt.py
+++ b/genai/tools/tools_code_exec_with_txt.py
@@ -24,7 +24,7 @@ def generate_content() -> str:
)
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.5-flash-preview-05-20"
+ model_id = "gemini-2.5-flash"
code_execution_tool = Tool(code_execution=ToolCodeExecution())
response = client.models.generate_content(
diff --git a/genai/tools/tools_code_exec_with_txt_local_img.py b/genai/tools/tools_code_exec_with_txt_local_img.py
index bfb52f27db4..b58102afb39 100644
--- a/genai/tools/tools_code_exec_with_txt_local_img.py
+++ b/genai/tools/tools_code_exec_with_txt_local_img.py
@@ -46,7 +46,7 @@ def generate_content() -> GenerateContentResponse:
image_data = Image.open(image_file)
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents=[image_data, prompt],
config=GenerateContentConfig(
tools=[code_execution_tool],
diff --git a/genai/tools/tools_enterprise_web_search_with_txt.py b/genai/tools/tools_enterprise_web_search_with_txt.py
index 98b9288f6d0..429f58600a9 100644
--- a/genai/tools/tools_enterprise_web_search_with_txt.py
+++ b/genai/tools/tools_enterprise_web_search_with_txt.py
@@ -26,7 +26,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="When is the next total solar eclipse in the United States?",
config=GenerateContentConfig(
tools=[
diff --git a/genai/tools/tools_func_def_with_txt.py b/genai/tools/tools_func_def_with_txt.py
index c39531c179f..89327dcd0cc 100644
--- a/genai/tools/tools_func_def_with_txt.py
+++ b/genai/tools/tools_func_def_with_txt.py
@@ -34,7 +34,7 @@ def get_current_weather(location: str) -> str:
return weather_map.get(location, "unknown")
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.0-flash-001"
+ model_id = "gemini-2.5-flash"
response = client.models.generate_content(
model=model_id,
diff --git a/genai/tools/tools_func_desc_with_txt.py b/genai/tools/tools_func_desc_with_txt.py
index a517fabc19e..6d89ede0fae 100644
--- a/genai/tools/tools_func_desc_with_txt.py
+++ b/genai/tools/tools_func_desc_with_txt.py
@@ -24,7 +24,7 @@ def generate_content() -> str:
)
client = genai.Client(http_options=HttpOptions(api_version="v1"))
- model_id = "gemini-2.0-flash-001"
+ model_id = "gemini-2.5-flash"
get_album_sales = FunctionDeclaration(
name="get_album_sales",
@@ -88,7 +88,7 @@ def generate_content() -> str:
# },
# )]
# [END googlegenaisdk_tools_func_desc_with_txt]
- return str(response.function_calls[0])
+ return str(response.function_calls)
if __name__ == "__main__":
diff --git a/genai/tools/tools_google_maps_coordinates_with_txt.py b/genai/tools/tools_google_maps_coordinates_with_txt.py
new file mode 100644
index 00000000000..dbeafa66578
--- /dev/null
+++ b/genai/tools/tools_google_maps_coordinates_with_txt.py
@@ -0,0 +1,59 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_tools_google_maps_coordinates_with_txt]
+ from google import genai
+ from google.genai.types import (
+ GenerateContentConfig,
+ GoogleMaps,
+ HttpOptions,
+ Tool,
+ ToolConfig,
+ RetrievalConfig,
+ LatLng
+ )
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1"))
+
+ response = client.models.generate_content(
+ model="gemini-2.5-flash",
+ contents="Where can I get the best espresso near me?",
+ config=GenerateContentConfig(
+ tools=[
+ # Use Google Maps Tool
+ Tool(google_maps=GoogleMaps())
+ ],
+ tool_config=ToolConfig(
+ retrieval_config=RetrievalConfig(
+ lat_lng=LatLng( # Pass coordinates for location-aware grounding
+ latitude=40.7128,
+ longitude=-74.006
+ ),
+ language_code="en_US", # Optional: localize Maps results
+ ),
+ ),
+ ),
+ )
+
+ print(response.text)
+ # Example response:
+ # 'Here are some of the top-rated places to get espresso near you: ...'
+ # [END googlegenaisdk_tools_google_maps_coordinates_with_txt]
+ return response.text
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/tools/tools_google_maps_with_txt.py b/genai/tools/tools_google_maps_with_txt.py
index 901fa397fd7..e2ff93e63b7 100644
--- a/genai/tools/tools_google_maps_with_txt.py
+++ b/genai/tools/tools_google_maps_with_txt.py
@@ -31,7 +31,7 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="Recommend a good restaurant in San Francisco.",
config=GenerateContentConfig(
tools=[
diff --git a/genai/tools/tools_google_search_and_urlcontext_with_txt.py b/genai/tools/tools_google_search_and_urlcontext_with_txt.py
new file mode 100644
index 00000000000..f55353985c4
--- /dev/null
+++ b/genai/tools/tools_google_search_and_urlcontext_with_txt.py
@@ -0,0 +1,95 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_tools_google_search_and_urlcontext_with_txt]
+ from google import genai
+ from google.genai.types import Tool, GenerateContentConfig, HttpOptions, UrlContext, GoogleSearch
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+ model_id = "gemini-2.5-flash"
+
+ tools = [
+ Tool(url_context=UrlContext),
+ Tool(google_search=GoogleSearch),
+ ]
+
+ # TODO(developer): Here put your URLs!
+ url = 'https://www.google.com/search?q=events+in+New+York'
+
+ response = client.models.generate_content(
+ model=model_id,
+ contents=f"Give me a three-day event schedule based on {url}. Also let me know what needs to be taken care of considering weather and commute.",
+ config=GenerateContentConfig(
+ tools=tools,
+ response_modalities=["TEXT"],
+ )
+ )
+
+ for each in response.candidates[0].content.parts:
+ print(each.text)
+ # Here is a possible three-day event schedule for New York City, focusing on the dates around October 7-9, 2025, along with weather and commute considerations.
+ #
+ # ### Three-Day Event Schedule: New York City (October 7-9, 2025)
+ #
+ # **Day 1: Tuesday, October 7, 2025 - Art and Culture**
+ #
+ # * **Morning (10:00 AM - 1:00 PM):** Visit "Phillips Visual Language: The Art of Irving Penn" at 432 Park Avenue. This exhibition is scheduled to end on this day, offering a last chance to see it.
+ # * **Lunch (1:00 PM - 2:00 PM):** Grab a quick lunch near Park Avenue.
+ # * **Afternoon (2:30 PM - 5:30 PM):** Explore the "Lincoln Center Festival of Firsts" at Lincoln Center. This festival runs until October 23rd, offering various performances or exhibits. Check their specific schedule for the day.
+ # * **Evening (7:00 PM onwards):** Experience a classic Broadway show. Popular options mentioned for October 2025 include "Six The Musical," "Wicked," "Hadestown," or "MJ - The Musical."
+ #
+ # **Day 2: Wednesday, October 8, 2025 - Unique Experiences and SoHo Vibes**
+ #
+ # * **Morning (11:00 AM - 1:00 PM):** Head to Brooklyn for the "Secret Room at IKEA Brooklyn" at 1 Beard Street. This unique event is scheduled to end on October 9th.
+ # * **Lunch (1:00 PM - 2:00 PM):** Enjoy lunch in Brooklyn, perhaps exploring local eateries in the area.
+ # * **Afternoon (2:30 PM - 5:30 PM):** Immerse yourself in the "The Weeknd & Nespresso Samra Origins Vinyl Cafe" at 579 Broadway in SoHo. This pop-up, curated by The Weeknd, combines coffee and music and runs until October 14th.
+ # * **Evening (6:00 PM onwards):** Explore the vibrant SoHo neighborhood, known for its shopping and dining. You could also consider a dinner cruise to see the illuminated Manhattan skyline and the Statue of Liberty.
+ #
+ # **Day 3: Thursday, October 9, 2025 - Film and Scenic Views**
+ #
+ # * **Morning (10:00 AM - 1:00 PM):** Attend a screening at the New York Greek Film Expo, which runs until October 12th in New York City.
+ # * **Lunch (1:00 PM - 2:00 PM):** Have lunch near the film expo's location.
+ # * **Afternoon (2:30 PM - 5:30 PM):** Take advantage of the pleasant October weather and enjoy outdoor activities. Consider biking along the rivers or through Central Park to admire the early autumn foliage.
+ # * **Evening (6:00 PM onwards):** Visit an observation deck like the Empire State Building or Top of the Rock for panoramic city views. Afterwards, enjoy dinner in a neighborhood of your choice.
+ #
+ # ### Weather and Commute Considerations:
+ #
+ # **Weather in Early October:**
+ #
+ # * **Temperatures:** Expect mild to cool temperatures. Average daily temperatures in early October range from 10°C (50°F) to 18°C (64°F), with occasional warmer days reaching the mid-20s°C (mid-70s°F). Evenings can be quite chilly.
+ # * **Rainfall:** October has a higher chance of rainfall compared to other months, with an average of 33mm and a 32% chance of rain on any given day.
+ # * **Sunshine:** You can generally expect about 7 hours of sunshine per day.
+ # * **What to Pack:** Pack layers! Bring a light jacket or sweater for the daytime, and a warmer coat for the evenings. An umbrella or a light raincoat is highly recommended due to the chance of showers. Comfortable walking shoes are a must for exploring the city.
+ #
+ # **Commute in New York City:**
+ #
+ # * **Public Transportation is Key:** The subway is generally the fastest and most efficient way to get around New York City, especially during the day. Buses are good for East-West travel, but can be slower due to traffic.
+ # * **Using Apps:** Utilize Google Maps or official MTA apps to plan your routes and check for real-time service updates. The subway runs 24/7, but expect potential delays or changes to routes during nights and weekends due to maintenance.
+ # * **Rush Hour:** Avoid subway and commuter train travel during peak rush hours (8 AM - 10 AM and 5 PM - 7 PM) if possible, as trains can be extremely crowded.
+ # * **Subway Etiquette:** When on the subway, stand to the side of the doors to let people exit before boarding, and move to the center of the car to make space. Hold onto a pole or seat, and remove your backpack to free up space.
+ # * **Transfers:** Subway fare is $2.90 per ride, and you get one free transfer between the subway and bus within a two-hour window.
+ # * **Walking:** New York City is very walkable. If the weather is pleasant, walking between nearby attractions is an excellent way to see the city.
+ # * **Taxis/Ride-sharing:** Uber, Lyft, and Curb (for NYC taxis) are available, but driving in the city is generally discouraged due to traffic and parking difficulties.
+ # * **Allow Extra Time:** Always factor in an additional 20-30 minutes for travel time, as delays can occur.
+
+ # get URLs retrieved for context
+ print(response.candidates[0].url_context_metadata)
+ # [END googlegenaisdk_tools_google_search_and_urlcontext_with_txt]
+ return response.text
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/tools/tools_google_search_with_txt.py b/genai/tools/tools_google_search_with_txt.py
index 4d21a10da2c..4069071d0c3 100644
--- a/genai/tools/tools_google_search_with_txt.py
+++ b/genai/tools/tools_google_search_with_txt.py
@@ -26,12 +26,17 @@ def generate_content() -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
- model="gemini-2.5-flash-preview-05-20",
+ model="gemini-2.5-flash",
contents="When is the next total solar eclipse in the United States?",
config=GenerateContentConfig(
tools=[
# Use Google Search Tool
- Tool(google_search=GoogleSearch())
+ Tool(
+ google_search=GoogleSearch(
+ # Optional: Domains to exclude from results
+ exclude_domains=["domain.com", "domain2.com"]
+ )
+ )
],
),
)
diff --git a/genai/tools/tools_urlcontext_with_txt.py b/genai/tools/tools_urlcontext_with_txt.py
new file mode 100644
index 00000000000..0d7551afe23
--- /dev/null
+++ b/genai/tools/tools_urlcontext_with_txt.py
@@ -0,0 +1,85 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_content() -> str:
+ # [START googlegenaisdk_tools_urlcontext_with_txt]
+ from google import genai
+ from google.genai.types import Tool, GenerateContentConfig, HttpOptions, UrlContext
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1"))
+ model_id = "gemini-2.5-flash"
+
+ url_context_tool = Tool(
+ url_context=UrlContext
+ )
+
+    # TODO(developer): Put your URLs here
+ url1 = "https://cloud.google.com/vertex-ai/docs/generative-ai/start"
+ url2 = "https://cloud.google.com/docs/overview"
+
+ response = client.models.generate_content(
+ model=model_id,
+ contents=f"Compare the content, purpose, and audiences of {url1} and {url2}.",
+ config=GenerateContentConfig(
+ tools=[url_context_tool],
+ response_modalities=["TEXT"],
+ )
+ )
+
+ for each in response.candidates[0].content.parts:
+ print(each.text)
+ # Gemini 2.5 Pro and Gemini 2.5 Flash are both advanced models offered by Google AI, but they are optimized for different use cases.
+ #
+ # Here's a comparison:
+ #
+ # **Gemini 2.5 Pro**
+ # * **Description**: This is Google's most advanced model, described as a "state-of-the-art thinking model". It excels at reasoning over complex problems in areas like code, mathematics, and STEM, and can analyze large datasets, codebases, and documents using a long context window.
+ # * **Input Data Types**: It supports audio, images, video, text, and PDF inputs.
+ # * **Output Data Types**: It produces text outputs.
+ # * **Token Limits**: It has an input token limit of 1,048,576 and an output token limit of 65,536.
+ # * **Supported Capabilities**: Gemini 2.5 Pro supports Batch API, Caching, Code execution, Function calling, Search grounding, Structured outputs, Thinking, and URL context.
+ # * **Knowledge Cutoff**: January 2025.
+ #
+ # **Gemini 2.5 Flash**
+ # * **Description**: Positioned as "fast and intelligent," Gemini 2.5 Flash is highlighted as Google's best model in terms of price-performance, offering well-rounded capabilities. It is ideal for large-scale processing, low-latency, high-volume tasks that require thinking, and agentic use cases.
+ # * **Input Data Types**: It supports text, images, video, and audio inputs.
+ # * **Output Data Types**: It produces text outputs.
+ # * **Token Limits**: Similar to Pro, it has an input token limit of 1,048,576 and an output token limit of 65,536.
+ # * **Supported Capabilities**: Gemini 2.5 Flash supports Batch API, Caching, Code execution, Function calling, Search grounding, Structured outputs, Thinking, and URL context.
+ # * **Knowledge Cutoff**: January 2025.
+ #
+ # **Key Differences and Similarities:**
+ #
+ # * **Primary Focus**: Gemini 2.5 Pro is geared towards advanced reasoning and in-depth analysis of complex problems and large documents. Gemini 2.5 Flash, on the other hand, is optimized for efficiency, scale, and high-volume, low-latency applications, making it a strong choice for price-performance sensitive scenarios.
+ # * **Input Modalities**: Both models handle various input types including text, images, video, and audio. Gemini 2.5 Pro explicitly lists PDF as an input type, while Gemini 2.5 Flash lists text, images, video, audio.
+ # * **Technical Specifications (for primary stable versions)**: Both models share the same substantial input and output token limits (1,048,576 input and 65,536 output). They also support a very similar set of core capabilities, including code execution, function calling, and URL context. Neither model supports audio generation, image generation, or Live API in their standard stable versions.
+ # * **Knowledge Cutoff**: Both models have a knowledge cutoff of January 2025.
+ #
+ # In essence, while both models are powerful and capable, Gemini 2.5 Pro is designed for maximum performance in complex reasoning tasks, whereas Gemini 2.5 Flash prioritizes cost-effectiveness and speed for broader, high-throughput applications.
+ # get URLs retrieved for context
+ print(response.candidates[0].url_context_metadata)
+ # url_metadata=[UrlMetadata(
+ # retrieved_url='https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash',
+ # url_retrieval_status=
+ # ), UrlMetadata(
+ # retrieved_url='https://ai.google.dev/gemini-api/docs/models#gemini-2.5-pro',
+ # url_retrieval_status=
+ # )]
+ # [END googlegenaisdk_tools_urlcontext_with_txt]
+ return response.text
+
+
+if __name__ == "__main__":
+ generate_content()
diff --git a/genai/tools/tools_vais_with_txt.py b/genai/tools/tools_vais_with_txt.py
index dbc90b64d15..8c6e51d3b0e 100644
--- a/genai/tools/tools_vais_with_txt.py
+++ b/genai/tools/tools_vais_with_txt.py
@@ -30,7 +30,7 @@ def generate_content(datastore: str) -> str:
# datastore = "projects/111111111111/locations/global/collections/default_collection/dataStores/data-store-id"
response = client.models.generate_content(
- model="gemini-2.0-flash-001",
+ model="gemini-2.5-flash",
contents="How do I make an appointment to renew my driver's license?",
config=GenerateContentConfig(
tools=[
@@ -50,7 +50,7 @@ def generate_content(datastore: str) -> str:
# Example response:
# 'The process for making an appointment to renew your driver's license varies depending on your location. To provide you with the most accurate instructions...'
# [END googlegenaisdk_tools_vais_with_txt]
- return response.text
+ return True
if __name__ == "__main__":
diff --git a/genai/tuning/preference_tuning_job_create.py b/genai/tuning/preference_tuning_job_create.py
new file mode 100644
index 00000000000..13fa05d61d0
--- /dev/null
+++ b/genai/tuning/preference_tuning_job_create.py
@@ -0,0 +1,74 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_tuning_job() -> str:
+ # [START googlegenaisdk_preference_tuning_job_create]
+ import time
+
+ from google import genai
+ from google.genai.types import HttpOptions, CreateTuningJobConfig, TuningDataset
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1"))
+
+ training_dataset = TuningDataset(
+ gcs_uri="gs://mybucket/preference_tuning/data/train_data.jsonl",
+ )
+ validation_dataset = TuningDataset(
+ gcs_uri="gs://mybucket/preference_tuning/data/validation_data.jsonl",
+ )
+
+ # Refer to https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini-use-continuous-tuning#google-gen-ai-sdk
+    # for an example of continuous tuning from an SFT-tuned model.
+ tuning_job = client.tunings.tune(
+ base_model="gemini-2.5-flash",
+ training_dataset=training_dataset,
+ config=CreateTuningJobConfig(
+ tuned_model_display_name="Example tuning job",
+ method="PREFERENCE_TUNING",
+ validation_dataset=validation_dataset,
+ ),
+ )
+
+ running_states = set([
+ "JOB_STATE_PENDING",
+ "JOB_STATE_RUNNING",
+ ])
+
+ while tuning_job.state in running_states:
+ print(tuning_job.state)
+ tuning_job = client.tunings.get(name=tuning_job.name)
+ time.sleep(60)
+
+ print(tuning_job.tuned_model.model)
+ print(tuning_job.tuned_model.endpoint)
+ print(tuning_job.experiment)
+ # Example response:
+ # projects/123456789012/locations/us-central1/models/1234567890@1
+ # projects/123456789012/locations/us-central1/endpoints/123456789012345
+ # projects/123456789012/locations/us-central1/metadataStores/default/contexts/tuning-experiment-2025010112345678
+
+ if tuning_job.tuned_model.checkpoints:
+ for i, checkpoint in enumerate(tuning_job.tuned_model.checkpoints):
+ print(f"Checkpoint {i + 1}: ", checkpoint)
+ # Example response:
+ # Checkpoint 1: checkpoint_id='1' epoch=1 step=10 endpoint='projects/123456789012/locations/us-central1/endpoints/123456789000000'
+ # Checkpoint 2: checkpoint_id='2' epoch=2 step=20 endpoint='projects/123456789012/locations/us-central1/endpoints/123456789012345'
+
+ # [END googlegenaisdk_preference_tuning_job_create]
+ return tuning_job.name
+
+
+if __name__ == "__main__":
+ create_tuning_job()
diff --git a/genai/tuning/requirements.txt b/genai/tuning/requirements.txt
index 7890f90e26a..e5fdb322ca4 100644
--- a/genai/tuning/requirements.txt
+++ b/genai/tuning/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.47.0
diff --git a/genai/tuning/test_tuning_examples.py b/genai/tuning/test_tuning_examples.py
index 1c829d0cafa..25b46402622 100644
--- a/genai/tuning/test_tuning_examples.py
+++ b/genai/tuning/test_tuning_examples.py
@@ -12,10 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from datetime import datetime as dt
+
from unittest.mock import call, MagicMock, patch
+from google.cloud import storage
from google.genai import types
+import pytest
+import preference_tuning_job_create
import tuning_job_create
import tuning_job_get
import tuning_job_list
@@ -25,10 +30,27 @@
import tuning_with_checkpoints_list_checkpoints
import tuning_with_checkpoints_set_default_checkpoint
import tuning_with_checkpoints_textgen_with_txt
+import tuning_with_pretuned_model
+
+
+GCS_OUTPUT_BUCKET = "python-docs-samples-tests"
+
+
+@pytest.fixture(scope="session")
+def output_gcs_uri() -> str:
+ prefix = f"text_output/{dt.now()}"
+
+ yield f"gs://{GCS_OUTPUT_BUCKET}/{prefix}"
+
+ storage_client = storage.Client()
+ bucket = storage_client.get_bucket(GCS_OUTPUT_BUCKET)
+ blobs = bucket.list_blobs(prefix=prefix)
+ for blob in blobs:
+ blob.delete()
@patch("google.genai.Client")
-def test_tuning_job_create(mock_genai_client: MagicMock) -> None:
+def test_tuning_job_create(mock_genai_client: MagicMock, output_gcs_uri: str) -> None:
# Mock the API response
mock_tuning_job = types.TuningJob(
name="test-tuning-job",
@@ -40,9 +62,9 @@ def test_tuning_job_create(mock_genai_client: MagicMock) -> None:
)
mock_genai_client.return_value.tunings.tune.return_value = mock_tuning_job
- response = tuning_job_create.create_tuning_job()
+ response = tuning_job_create.create_tuning_job(output_gcs_uri=output_gcs_uri)
- mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1"))
+ mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1beta1"))
mock_genai_client.return_value.tunings.tune.assert_called_once()
assert response == "test-tuning-job"
@@ -121,7 +143,7 @@ def test_tuning_textgen_with_txt(mock_genai_client: MagicMock) -> None:
@patch("google.genai.Client")
-def test_tuning_job_create_with_checkpoints(mock_genai_client: MagicMock) -> None:
+def test_tuning_job_create_with_checkpoints(mock_genai_client: MagicMock, output_gcs_uri: str) -> None:
# Mock the API response
mock_tuning_job = types.TuningJob(
name="test-tuning-job",
@@ -137,9 +159,9 @@ def test_tuning_job_create_with_checkpoints(mock_genai_client: MagicMock) -> Non
)
mock_genai_client.return_value.tunings.tune.return_value = mock_tuning_job
- response = tuning_with_checkpoints_create.create_with_checkpoints()
+ response = tuning_with_checkpoints_create.create_with_checkpoints(output_gcs_uri=output_gcs_uri)
- mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1"))
+ mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1beta1"))
mock_genai_client.return_value.tunings.tune.assert_called_once()
assert response == "test-tuning-job"
@@ -286,3 +308,43 @@ def test_tuning_with_checkpoints_textgen_with_txt(mock_genai_client: MagicMock)
call(model="test-endpoint-1", contents="Why is the sky blue?"),
call(model="test-endpoint-2", contents="Why is the sky blue?"),
]
+
+
+@patch("google.genai.Client")
+def test_tuning_with_pretuned_model(mock_genai_client: MagicMock) -> None:
+ # Mock the API response
+ mock_tuning_job = types.TuningJob(
+ name="test-tuning-job",
+ experiment="test-experiment",
+ tuned_model=types.TunedModel(
+ model="test-model-2",
+ endpoint="test-endpoint"
+ )
+ )
+ mock_genai_client.return_value.tunings.tune.return_value = mock_tuning_job
+
+ response = tuning_with_pretuned_model.create_continuous_tuning_job(tuned_model_name="test-model", checkpoint_id="1")
+
+ mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1beta1"))
+ mock_genai_client.return_value.tunings.tune.assert_called_once()
+ assert response == "test-tuning-job"
+
+
+@patch("google.genai.Client")
+def test_preference_tuning_job_create(mock_genai_client: MagicMock) -> None:
+ # Mock the API response
+ mock_tuning_job = types.TuningJob(
+ name="test-tuning-job",
+ experiment="test-experiment",
+ tuned_model=types.TunedModel(
+ model="test-model",
+ endpoint="test-endpoint"
+ )
+ )
+ mock_genai_client.return_value.tunings.tune.return_value = mock_tuning_job
+
+ response = preference_tuning_job_create.create_tuning_job()
+
+ mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1"))
+ mock_genai_client.return_value.tunings.tune.assert_called_once()
+ assert response == "test-tuning-job"
diff --git a/genai/tuning/tuning_job_create.py b/genai/tuning/tuning_job_create.py
index e411027451b..168b8a50c3b 100644
--- a/genai/tuning/tuning_job_create.py
+++ b/genai/tuning/tuning_job_create.py
@@ -13,20 +13,46 @@
# limitations under the License.
-def create_tuning_job() -> str:
+def create_tuning_job(output_gcs_uri: str) -> str:
# [START googlegenaisdk_tuning_job_create]
import time
from google import genai
- from google.genai.types import HttpOptions, CreateTuningJobConfig
+ from google.genai.types import HttpOptions, CreateTuningJobConfig, TuningDataset, EvaluationConfig, OutputConfig, GcsDestination, Metric
- client = genai.Client(http_options=HttpOptions(api_version="v1"))
+    # TODO(developer): Update and uncomment the line below
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+
+ training_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_train_data.jsonl",
+ )
+ validation_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_validation_data.jsonl",
+ )
+
+ evaluation_config = EvaluationConfig(
+ metrics=[
+ Metric(
+ name="FLUENCY",
+ prompt_template="""Evaluate this {prediction}"""
+ )
+ ],
+ output_config=OutputConfig(
+ gcs_destination=GcsDestination(
+ output_uri_prefix=output_gcs_uri,
+ )
+ ),
+ )
tuning_job = client.tunings.tune(
- base_model="gemini-2.0-flash-lite-001",
- training_dataset="gs://cloud-samples-data/ai-platform/generative_ai/gemini-2_0/text/sft_train_data.jsonl",
+ base_model="gemini-2.5-flash",
+ training_dataset=training_dataset,
config=CreateTuningJobConfig(
tuned_model_display_name="Example tuning job",
+ validation_dataset=validation_dataset,
+ evaluation_config=evaluation_config,
),
)
@@ -60,4 +86,4 @@ def create_tuning_job() -> str:
if __name__ == "__main__":
- create_tuning_job()
+ create_tuning_job(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/tuning/tuning_with_checkpoints_create.py b/genai/tuning/tuning_with_checkpoints_create.py
index 5427f2fa57c..d15db2bc819 100644
--- a/genai/tuning/tuning_with_checkpoints_create.py
+++ b/genai/tuning/tuning_with_checkpoints_create.py
@@ -13,22 +13,48 @@
# limitations under the License.
-def create_with_checkpoints() -> str:
+def create_with_checkpoints(output_gcs_uri: str) -> str:
# [START googlegenaisdk_tuning_with_checkpoints_create]
import time
from google import genai
- from google.genai.types import HttpOptions, CreateTuningJobConfig
+ from google.genai.types import HttpOptions, CreateTuningJobConfig, TuningDataset, EvaluationConfig, OutputConfig, GcsDestination, Metric
- client = genai.Client(http_options=HttpOptions(api_version="v1"))
+    # TODO(developer): Update and uncomment the line below
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+
+ training_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_train_data.jsonl",
+ )
+ validation_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_validation_data.jsonl",
+ )
+
+ evaluation_config = EvaluationConfig(
+ metrics=[
+ Metric(
+ name="FLUENCY",
+ prompt_template="""Evaluate this {prediction}"""
+ )
+ ],
+ output_config=OutputConfig(
+ gcs_destination=GcsDestination(
+ output_uri_prefix=output_gcs_uri,
+ )
+ ),
+ )
tuning_job = client.tunings.tune(
- base_model="gemini-2.0-flash-lite-001",
- training_dataset="gs://cloud-samples-data/ai-platform/generative_ai/gemini-2_0/text/sft_train_data.jsonl",
+ base_model="gemini-2.5-flash",
+ training_dataset=training_dataset,
config=CreateTuningJobConfig(
tuned_model_display_name="Example tuning job",
# Set to True to disable tuning intermediate checkpoints. Default is False.
export_last_checkpoint_only=False,
+ validation_dataset=validation_dataset,
+ evaluation_config=evaluation_config,
),
)
@@ -62,4 +88,4 @@ def create_with_checkpoints() -> str:
if __name__ == "__main__":
- create_with_checkpoints()
+ create_with_checkpoints(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/tuning/tuning_with_pretuned_model.py b/genai/tuning/tuning_with_pretuned_model.py
new file mode 100644
index 00000000000..75911b51206
--- /dev/null
+++ b/genai/tuning/tuning_with_pretuned_model.py
@@ -0,0 +1,78 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_continuous_tuning_job(tuned_model_name: str, checkpoint_id: str) -> str:
+ # [START googlegenaisdk_tuning_with_pretuned_model]
+ import time
+
+ from google import genai
+ from google.genai.types import HttpOptions, TuningDataset, CreateTuningJobConfig
+
+    # TODO(developer): Update and uncomment the lines below
+ # tuned_model_name = "projects/123456789012/locations/us-central1/models/1234567890@1"
+ # checkpoint_id = "1"
+
+ client = genai.Client(http_options=HttpOptions(api_version="v1beta1"))
+
+ training_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_train_data.jsonl",
+ )
+ validation_dataset = TuningDataset(
+ gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini/text/sft_validation_data.jsonl",
+ )
+
+ tuning_job = client.tunings.tune(
+ base_model=tuned_model_name, # Note: Using a Tuned Model
+ training_dataset=training_dataset,
+ config=CreateTuningJobConfig(
+ tuned_model_display_name="Example tuning job",
+ validation_dataset=validation_dataset,
+ pre_tuned_model_checkpoint_id=checkpoint_id,
+ ),
+ )
+
+ running_states = set([
+ "JOB_STATE_PENDING",
+ "JOB_STATE_RUNNING",
+ ])
+
+ while tuning_job.state in running_states:
+ print(tuning_job.state)
+ tuning_job = client.tunings.get(name=tuning_job.name)
+ time.sleep(60)
+
+ print(tuning_job.tuned_model.model)
+ print(tuning_job.tuned_model.endpoint)
+ print(tuning_job.experiment)
+ # Example response:
+ # projects/123456789012/locations/us-central1/models/1234567890@2
+ # projects/123456789012/locations/us-central1/endpoints/123456789012345
+ # projects/123456789012/locations/us-central1/metadataStores/default/contexts/tuning-experiment-2025010112345678
+
+ if tuning_job.tuned_model.checkpoints:
+ for i, checkpoint in enumerate(tuning_job.tuned_model.checkpoints):
+ print(f"Checkpoint {i + 1}: ", checkpoint)
+ # Example response:
+ # Checkpoint 1: checkpoint_id='1' epoch=1 step=10 endpoint='projects/123456789012/locations/us-central1/endpoints/123456789000000'
+ # Checkpoint 2: checkpoint_id='2' epoch=2 step=20 endpoint='projects/123456789012/locations/us-central1/endpoints/123456789012345'
+
+ # [END googlegenaisdk_tuning_with_pretuned_model]
+ return tuning_job.name
+
+
+if __name__ == "__main__":
+ pre_tuned_model_name = input("Pre-tuned model name: ")
+ pre_tuned_model_checkpoint_id = input("Pre-tuned model checkpoint id: ")
+ create_continuous_tuning_job(pre_tuned_model_name, pre_tuned_model_checkpoint_id)
diff --git a/genai/video_generation/requirements.txt b/genai/video_generation/requirements.txt
index 7890f90e26a..b83c25fae61 100644
--- a/genai/video_generation/requirements.txt
+++ b/genai/video_generation/requirements.txt
@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.43.0
diff --git a/genai/video_generation/test_video_generation_examples.py b/genai/video_generation/test_video_generation_examples.py
index 479494258da..639793ff9e8 100644
--- a/genai/video_generation/test_video_generation_examples.py
+++ b/genai/video_generation/test_video_generation_examples.py
@@ -24,10 +24,22 @@
import pytest
+import videogen_with_first_last_frame
+
import videogen_with_img
+import videogen_with_no_rewrite
+
+import videogen_with_reference
+
import videogen_with_txt
+import videogen_with_vid
+
+import videogen_with_vid_edit_insert
+
+import videogen_with_vid_edit_remove
+
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
@@ -58,3 +70,33 @@ def test_videogen_with_txt(output_gcs_uri: str) -> None:
def test_videogen_with_img(output_gcs_uri: str) -> None:
response = videogen_with_img.generate_videos_from_image(output_gcs_uri=output_gcs_uri)
assert response
+
+
+def test_videogen_with_first_last_frame(output_gcs_uri: str) -> None:
+ response = videogen_with_first_last_frame.generate_videos_from_first_last_frame(output_gcs_uri=output_gcs_uri)
+ assert response
+
+
+def test_videogen_with_vid(output_gcs_uri: str) -> None:
+ response = videogen_with_vid.generate_videos_from_video(output_gcs_uri=output_gcs_uri)
+ assert response
+
+
+def test_videogen_with_no_rewriter(output_gcs_uri: str) -> None:
+ response = videogen_with_no_rewrite.generate_videos_no_rewriter(output_gcs_uri=output_gcs_uri)
+ assert response
+
+
+def test_videogen_with_reference(output_gcs_uri: str) -> None:
+ response = videogen_with_reference.generate_videos_from_reference(output_gcs_uri=output_gcs_uri)
+ assert response
+
+
+def test_videogen_with_edit_insert(output_gcs_uri: str) -> None:
+ response = videogen_with_vid_edit_insert.edit_videos_insert_from_video(output_gcs_uri=output_gcs_uri)
+ assert response
+
+
+def test_videogen_with_edit_remove(output_gcs_uri: str) -> None:
+ response = videogen_with_vid_edit_remove.edit_videos_remove_from_video(output_gcs_uri=output_gcs_uri)
+ assert response
diff --git a/genai/video_generation/videogen_with_first_last_frame.py b/genai/video_generation/videogen_with_first_last_frame.py
new file mode 100644
index 00000000000..52b5ab3a58a
--- /dev/null
+++ b/genai/video_generation/videogen_with_first_last_frame.py
@@ -0,0 +1,59 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_videos_from_first_last_frame(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_first_last_frame]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosConfig, Image
+
+ client = genai.Client()
+
+    # TODO(developer): Update and uncomment the line below
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-3.1-generate-001",
+ prompt="a hand reaches in and places a glass of milk next to the plate of cookies",
+ image=Image(
+ gcs_uri="gs://cloud-samples-data/generative-ai/image/cookies.png",
+ mime_type="image/png",
+ ),
+ config=GenerateVideosConfig(
+ aspect_ratio="16:9",
+ last_frame=Image(
+ gcs_uri="gs://cloud-samples-data/generative-ai/image/cookies-milk.png",
+ mime_type="image/png",
+ ),
+ output_gcs_uri=output_gcs_uri,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_first_last_frame]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ generate_videos_from_first_last_frame(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/video_generation/videogen_with_img.py b/genai/video_generation/videogen_with_img.py
index e8a3ac3dd41..ce725b1b03c 100644
--- a/genai/video_generation/videogen_with_img.py
+++ b/genai/video_generation/videogen_with_img.py
@@ -25,7 +25,8 @@ def generate_videos_from_image(output_gcs_uri: str) -> str:
# output_gcs_uri = "gs://your-bucket/your-prefix"
operation = client.models.generate_videos(
- model="veo-3.0-generate-preview",
+ model="veo-3.1-generate-001",
+ prompt="Extreme close-up of a cluster of vibrant wildflowers swaying gently in a sun-drenched meadow.",
image=Image(
gcs_uri="gs://cloud-samples-data/generative-ai/image/flowers.png",
mime_type="image/png",
diff --git a/genai/video_generation/videogen_with_no_rewrite.py b/genai/video_generation/videogen_with_no_rewrite.py
new file mode 100644
index 00000000000..a48af5dcfcd
--- /dev/null
+++ b/genai/video_generation/videogen_with_no_rewrite.py
@@ -0,0 +1,55 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_videos_no_rewriter(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_no_rewrite]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosConfig
+
+ client = genai.Client()
+
+    # TODO(developer): Update and uncomment the line below
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-2.0-generate-001",
+ prompt="a cat reading a book",
+ config=GenerateVideosConfig(
+ aspect_ratio="16:9",
+ output_gcs_uri=output_gcs_uri,
+ number_of_videos=1,
+ duration_seconds=5,
+ person_generation="dont_allow",
+ enhance_prompt=False,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_no_rewrite]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ generate_videos_no_rewriter(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/video_generation/videogen_with_reference.py b/genai/video_generation/videogen_with_reference.py
new file mode 100644
index 00000000000..6543530ff9d
--- /dev/null
+++ b/genai/video_generation/videogen_with_reference.py
@@ -0,0 +1,60 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_videos_from_reference(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_img_reference]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosConfig, Image, VideoGenerationReferenceImage
+
+ client = genai.Client()
+
+    # TODO(developer): Update and uncomment the line below
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-3.1-generate-preview",
+ prompt="A person walks in carrying a vase full of flowers and places the vase on a kitchen table.",
+ config=GenerateVideosConfig(
+ reference_images=[
+ VideoGenerationReferenceImage(
+ image=Image(
+ gcs_uri="gs://cloud-samples-data/generative-ai/image/vase.png",
+ mime_type="image/png",
+ ),
+ reference_type="asset",
+ ),
+ ],
+ aspect_ratio="9:16",
+ output_gcs_uri=output_gcs_uri,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_img_reference]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ generate_videos_from_reference(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/video_generation/videogen_with_txt.py b/genai/video_generation/videogen_with_txt.py
index 2a4d6d3b49a..17ad11df4a3 100644
--- a/genai/video_generation/videogen_with_txt.py
+++ b/genai/video_generation/videogen_with_txt.py
@@ -25,7 +25,7 @@ def generate_videos(output_gcs_uri: str) -> str:
# output_gcs_uri = "gs://your-bucket/your-prefix"
operation = client.models.generate_videos(
- model="veo-3.0-generate-preview",
+ model="veo-3.1-generate-001",
prompt="a cat reading a book",
config=GenerateVideosConfig(
aspect_ratio="16:9",
diff --git a/genai/video_generation/videogen_with_vid.py b/genai/video_generation/videogen_with_vid.py
new file mode 100644
index 00000000000..efcd63bcb4b
--- /dev/null
+++ b/genai/video_generation/videogen_with_vid.py
@@ -0,0 +1,54 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def generate_videos_from_video(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_vid]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosConfig, Video
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-3.1-generate-preview",
+ prompt="a butterfly flies in and lands on the flower",
+ video=Video(
+ uri="gs://cloud-samples-data/generative-ai/video/flower.mp4",
+ mime_type="video/mp4",
+ ),
+ config=GenerateVideosConfig(
+ output_gcs_uri=output_gcs_uri,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_vid]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ generate_videos_from_video(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/video_generation/videogen_with_vid_edit_insert.py b/genai/video_generation/videogen_with_vid_edit_insert.py
new file mode 100644
index 00000000000..e45b1da5863
--- /dev/null
+++ b/genai/video_generation/videogen_with_vid_edit_insert.py
@@ -0,0 +1,60 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def edit_videos_insert_from_video(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_vid_edit_insert]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosSource, GenerateVideosConfig, Image, Video, VideoGenerationMask, VideoGenerationMaskMode
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-2.0-generate-preview",
+ source=GenerateVideosSource(
+ prompt="a sheep",
+ video=Video(uri="gs://cloud-samples-data/generative-ai/video/truck.mp4", mime_type="video/mp4")
+ ),
+ config=GenerateVideosConfig(
+ mask=VideoGenerationMask(
+ image=Image(
+ gcs_uri="gs://cloud-samples-data/generative-ai/image/truck-inpainting-dynamic-mask.png",
+ mime_type="image/png",
+ ),
+ mask_mode=VideoGenerationMaskMode.INSERT,
+ ),
+ output_gcs_uri=output_gcs_uri,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_vid_edit_insert]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ edit_videos_insert_from_video(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/genai/video_generation/videogen_with_vid_edit_remove.py b/genai/video_generation/videogen_with_vid_edit_remove.py
new file mode 100644
index 00000000000..ef0cd5cd2cc
--- /dev/null
+++ b/genai/video_generation/videogen_with_vid_edit_remove.py
@@ -0,0 +1,59 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def edit_videos_remove_from_video(output_gcs_uri: str) -> str:
+ # [START googlegenaisdk_videogen_with_vid_edit_remove]
+ import time
+ from google import genai
+ from google.genai.types import GenerateVideosSource, GenerateVideosConfig, Image, Video, VideoGenerationMask, VideoGenerationMaskMode
+
+ client = genai.Client()
+
+ # TODO(developer): Update and un-comment below line
+ # output_gcs_uri = "gs://your-bucket/your-prefix"
+
+ operation = client.models.generate_videos(
+ model="veo-2.0-generate-preview",
+ source=GenerateVideosSource(
+ video=Video(uri="gs://cloud-samples-data/generative-ai/video/truck.mp4", mime_type="video/mp4")
+ ),
+ config=GenerateVideosConfig(
+ mask=VideoGenerationMask(
+ image=Image(
+ gcs_uri="gs://cloud-samples-data/generative-ai/image/truck-inpainting-dynamic-mask.png",
+ mime_type="image/png",
+ ),
+ mask_mode=VideoGenerationMaskMode.REMOVE,
+ ),
+ output_gcs_uri=output_gcs_uri,
+ ),
+ )
+
+ while not operation.done:
+ time.sleep(15)
+ operation = client.operations.get(operation)
+ print(operation)
+
+ if operation.response:
+ print(operation.result.generated_videos[0].video.uri)
+
+ # Example response:
+ # gs://your-bucket/your-prefix
+ # [END googlegenaisdk_videogen_with_vid_edit_remove]
+ return operation.result.generated_videos[0].video.uri
+
+
+if __name__ == "__main__":
+ edit_videos_remove_from_video(output_gcs_uri="gs://your-bucket/your-prefix")
diff --git a/generative_ai/image_generation/edit_image_inpainting_insert_mask_mode_test.py b/generative_ai/image_generation/edit_image_inpainting_insert_mask_mode_test.py
index 1185c60c3c5..bdae7e6041c 100644
--- a/generative_ai/image_generation/edit_image_inpainting_insert_mask_mode_test.py
+++ b/generative_ai/image_generation/edit_image_inpainting_insert_mask_mode_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_inpainting_insert_mask_mode
@@ -28,6 +29,7 @@
_PROMPT = "beach"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_inpainting_insert_mask_mode() -> None:
response = (
diff --git a/generative_ai/image_generation/edit_image_inpainting_insert_mask_test.py b/generative_ai/image_generation/edit_image_inpainting_insert_mask_test.py
index 5154baa1fca..5fadcfa78d5 100644
--- a/generative_ai/image_generation/edit_image_inpainting_insert_mask_test.py
+++ b/generative_ai/image_generation/edit_image_inpainting_insert_mask_test.py
@@ -16,6 +16,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_inpainting_insert_mask
@@ -27,6 +28,7 @@
_PROMPT = "hat"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_inpainting_insert_mask() -> None:
response = edit_image_inpainting_insert_mask.edit_image_inpainting_insert_mask(
diff --git a/generative_ai/image_generation/edit_image_inpainting_remove_mask_mode_test.py b/generative_ai/image_generation/edit_image_inpainting_remove_mask_mode_test.py
index 54633a87fee..68dea245513 100644
--- a/generative_ai/image_generation/edit_image_inpainting_remove_mask_mode_test.py
+++ b/generative_ai/image_generation/edit_image_inpainting_remove_mask_mode_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_inpainting_remove_mask_mode
@@ -28,6 +29,7 @@
_PROMPT = "sports car"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_inpainting_remove_mask_mode() -> None:
response = (
diff --git a/generative_ai/image_generation/edit_image_inpainting_remove_mask_test.py b/generative_ai/image_generation/edit_image_inpainting_remove_mask_test.py
index 43c965c8bf5..b11b1b1605f 100644
--- a/generative_ai/image_generation/edit_image_inpainting_remove_mask_test.py
+++ b/generative_ai/image_generation/edit_image_inpainting_remove_mask_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_inpainting_remove_mask
@@ -28,6 +29,7 @@
_PROMPT = "volleyball game"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_inpainting_remove_mask() -> None:
response = edit_image_inpainting_remove_mask.edit_image_inpainting_remove_mask(
diff --git a/generative_ai/image_generation/edit_image_mask_free_test.py b/generative_ai/image_generation/edit_image_mask_free_test.py
index 96b6e717dd2..078578f8bd9 100644
--- a/generative_ai/image_generation/edit_image_mask_free_test.py
+++ b/generative_ai/image_generation/edit_image_mask_free_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_mask_free
@@ -27,6 +28,7 @@
_PROMPT = "a dog"
+@pytest.mark.skip("imagegeneration@002 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_mask_free() -> None:
response = edit_image_mask_free.edit_image_mask_free(
diff --git a/generative_ai/image_generation/edit_image_mask_test.py b/generative_ai/image_generation/edit_image_mask_test.py
index fee71f5ab8a..fa244f6ef73 100644
--- a/generative_ai/image_generation/edit_image_mask_test.py
+++ b/generative_ai/image_generation/edit_image_mask_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_mask
@@ -28,6 +29,7 @@
_PROMPT = "a big book"
+@pytest.mark.skip("imagegeneration@002 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_mask() -> None:
response = edit_image_mask.edit_image_mask(
diff --git a/generative_ai/image_generation/edit_image_outpainting_mask_test.py b/generative_ai/image_generation/edit_image_outpainting_mask_test.py
index e54ba9c5e61..1827d871694 100644
--- a/generative_ai/image_generation/edit_image_outpainting_mask_test.py
+++ b/generative_ai/image_generation/edit_image_outpainting_mask_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_outpainting_mask
@@ -28,6 +29,7 @@
_PROMPT = "city with skyscrapers"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_outpainting_mask() -> None:
response = edit_image_outpainting_mask.edit_image_outpainting_mask(
diff --git a/generative_ai/image_generation/edit_image_product_image_test.py b/generative_ai/image_generation/edit_image_product_image_test.py
index 487a55435f7..d0256eafc93 100644
--- a/generative_ai/image_generation/edit_image_product_image_test.py
+++ b/generative_ai/image_generation/edit_image_product_image_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import edit_image_product_image
@@ -27,6 +28,7 @@
_PROMPT = "beach"
+@pytest.mark.skip("imagegeneration@006 samples pending deprecation")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_edit_image_product_image() -> None:
response = edit_image_product_image.edit_image_product_image(
diff --git a/generative_ai/image_generation/get_short_form_image_captions_test.py b/generative_ai/image_generation/get_short_form_image_captions_test.py
index ed56049c070..2364d45d306 100644
--- a/generative_ai/image_generation/get_short_form_image_captions_test.py
+++ b/generative_ai/image_generation/get_short_form_image_captions_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import get_short_form_image_captions
@@ -25,6 +26,7 @@
_INPUT_FILE = os.path.join(_RESOURCES, "cat.png")
+@pytest.mark.skip("Sample pending deprecation b/452720552")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_get_short_form_image_captions() -> None:
response = get_short_form_image_captions.get_short_form_image_captions(
diff --git a/generative_ai/image_generation/get_short_form_image_responses_test.py b/generative_ai/image_generation/get_short_form_image_responses_test.py
index 00c7827517a..c901a8734bd 100644
--- a/generative_ai/image_generation/get_short_form_image_responses_test.py
+++ b/generative_ai/image_generation/get_short_form_image_responses_test.py
@@ -17,6 +17,7 @@
import backoff
from google.api_core.exceptions import ResourceExhausted
+import pytest
import get_short_form_image_responses
@@ -26,6 +27,7 @@
_QUESTION = "What breed of cat is this a picture of?"
+@pytest.mark.skip("Sample pending deprecation b/452720552")
@backoff.on_exception(backoff.expo, ResourceExhausted, max_time=60)
def test_get_short_form_image_responses() -> None:
response = get_short_form_image_responses.get_short_form_image_responses(
diff --git a/generative_ai/labels/requirements.txt b/generative_ai/labels/requirements.txt
index 913473b5ef0..44964bbf7b1 100644
--- a/generative_ai/labels/requirements.txt
+++ b/generative_ai/labels/requirements.txt
@@ -1 +1 @@
-google-cloud-aiplatform==1.74.0
+google-cloud-aiplatform==1.133.0
diff --git a/generative_ai/prompts/test_prompt_template.py b/generative_ai/prompts/test_prompt_template.py
index 2eb73057834..92c358e5d1b 100644
--- a/generative_ai/prompts/test_prompt_template.py
+++ b/generative_ai/prompts/test_prompt_template.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from vertexai.preview import prompts
+
import prompt_create
import prompt_delete
import prompt_get
@@ -29,6 +31,7 @@ def test_prompt_template() -> None:
def test_prompt_create() -> None:
response = prompt_create.prompt_create()
assert response
+ prompts.delete(prompt_id=response.prompt_id)
def test_prompt_list_prompts() -> None:
@@ -39,11 +42,14 @@ def test_prompt_list_prompts() -> None:
def test_prompt_get() -> None:
get_prompt = prompt_get.get_prompt()
assert get_prompt
+ prompts.delete(prompt_id=get_prompt.prompt_id)
def test_prompt_list_version() -> None:
list_versions = prompt_list_version.list_prompt_version()
assert list_versions
+ for prompt in list_versions:
+ prompts.delete(prompt_id=prompt.prompt_id)
def test_prompt_delete() -> None:
diff --git a/generative_ai/rag/quickstart_example.py b/generative_ai/rag/quickstart_example.py
index 1a4f2144826..32649f64aeb 100644
--- a/generative_ai/rag/quickstart_example.py
+++ b/generative_ai/rag/quickstart_example.py
@@ -39,7 +39,7 @@ def quickstart(
# paths = ["https://drive.google.com/file/d/123", "gs://my_bucket/my_files_dir"] # Supports Google Cloud Storage and Google Drive Links
# Initialize Vertex AI API once per session
- vertexai.init(project=PROJECT_ID, location="us-central1")
+ vertexai.init(project=PROJECT_ID, location="us-east4")
# Create RagCorpus
# Configure embedding model, for example "text-embedding-005".
diff --git a/iam/cloud-client/snippets/list_keys.py b/iam/cloud-client/snippets/list_keys.py
index 781ae742b99..26867f72020 100644
--- a/iam/cloud-client/snippets/list_keys.py
+++ b/iam/cloud-client/snippets/list_keys.py
@@ -24,7 +24,7 @@
def list_keys(project_id: str, account: str) -> List[iam_admin_v1.ServiceAccountKey]:
- """Creates a key for a service account.
+ """Lists a key for a service account.
project_id: ID or number of the Google Cloud project you want to use.
account: ID or email which is unique identifier of the service account.
diff --git a/iap/app_engine_app/requirements.txt b/iap/app_engine_app/requirements.txt
index f306f93a9ca..3954d17e732 100644
--- a/iap/app_engine_app/requirements.txt
+++ b/iap/app_engine_app/requirements.txt
@@ -1,2 +1,2 @@
-Flask==3.0.3
-Werkzeug==3.0.3
+Flask==3.1.3
+Werkzeug==3.1.4
diff --git a/iap/requirements.txt b/iap/requirements.txt
index 3c2961ba6a2..c0d103f39e4 100644
--- a/iap/requirements.txt
+++ b/iap/requirements.txt
@@ -1,9 +1,9 @@
cryptography==45.0.1
-Flask==3.0.3
+Flask==3.1.3
google-auth==2.38.0
gunicorn==23.0.0
requests==2.32.4
requests-toolbelt==1.0.0
-Werkzeug==3.0.6
+Werkzeug==3.1.4
google-cloud-iam~=2.17.0
PyJWT~=2.10.1
\ No newline at end of file
diff --git a/kms/snippets/delete_key.py b/kms/snippets/delete_key.py
new file mode 100644
index 00000000000..512e3df6a42
--- /dev/null
+++ b/kms/snippets/delete_key.py
@@ -0,0 +1,54 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START kms_delete_key]
+from google.cloud import kms
+
+
+def delete_key(
+ project_id: str, location_id: str, key_ring_id: str, key_id: str
+) -> None:
+ """
+ Delete the given key. This action is permanent and cannot be undone. Once the
+ key is deleted, it will no longer exist.
+
+ Args:
+ project_id (str): Google Cloud project ID (e.g. 'my-project').
+ location_id (str): Cloud KMS location (e.g. 'us-east1').
+ key_ring_id (str): ID of the Cloud KMS key ring (e.g. 'my-key-ring').
+ key_id (str): ID of the key to use (e.g. 'my-key').
+
+ Returns:
+ None
+
+ """
+
+ # Create the client.
+ client = kms.KeyManagementServiceClient()
+
+ # Build the key name.
+ key_name = client.crypto_key_path(project_id, location_id, key_ring_id, key_id)
+
+ # Call the API.
+ # Note: delete_crypto_key returns a long-running operation.
+ # Warning: This operation is permanent and cannot be undone.
+ operation = client.delete_crypto_key(request={"name": key_name})
+
+ # Wait for the operation to complete.
+ operation.result()
+
+ print(f"Deleted key: {key_name}")
+
+
+# [END kms_delete_key]
diff --git a/kms/snippets/delete_key_version.py b/kms/snippets/delete_key_version.py
new file mode 100644
index 00000000000..669de9afbd6
--- /dev/null
+++ b/kms/snippets/delete_key_version.py
@@ -0,0 +1,57 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START kms_delete_key_version]
+from google.cloud import kms
+
+
+def delete_key_version(
+ project_id: str, location_id: str, key_ring_id: str, key_id: str, version_id: str
+) -> None:
+ """
+ Delete the given key version. This action is permanent and cannot be undone.
+ Once the key version is deleted, it will no longer exist.
+
+ Args:
+ project_id (str): Google Cloud project ID (e.g. 'my-project').
+ location_id (str): Cloud KMS location (e.g. 'us-east1').
+ key_ring_id (str): ID of the Cloud KMS key ring (e.g. 'my-key-ring').
+ key_id (str): ID of the key to use (e.g. 'my-key').
+ version_id (str): ID of the key version to delete (e.g. '1').
+
+ Returns:
+ None
+
+ """
+
+ # Create the client.
+ client = kms.KeyManagementServiceClient()
+
+ # Build the key version name.
+ key_version_name = client.crypto_key_version_path(
+ project_id, location_id, key_ring_id, key_id, version_id
+ )
+
+ # Call the API.
+ # Note: delete_crypto_key_version returns a long-running operation.
+ # Warning: This operation is permanent and cannot be undone.
+ operation = client.delete_crypto_key_version(request={"name": key_version_name})
+
+ # Wait for the operation to complete.
+ operation.result()
+
+ print(f"Deleted key version: {key_version_name}")
+
+
+# [END kms_delete_key_version]
diff --git a/kms/snippets/get_retired_resource.py b/kms/snippets/get_retired_resource.py
new file mode 100644
index 00000000000..48042d7fa9f
--- /dev/null
+++ b/kms/snippets/get_retired_resource.py
@@ -0,0 +1,50 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START kms_get_retired_resource]
+from google.cloud import kms
+
+
+def get_retired_resource(
+ project_id: str, location_id: str, retired_resource_id: str
+) -> kms.RetiredResource:
+ """
+ Get the details of a retired resource.
+
+ Args:
+ project_id (str): Google Cloud project ID (e.g. 'my-project').
+ location_id (str): Cloud KMS location (e.g. 'us-east1').
+        retired_resource_id (str): ID of the retired resource to get.
+
+ Returns:
+ kms.RetiredResource: The requested retired resource.
+
+ """
+
+ # Create the client.
+ client = kms.KeyManagementServiceClient()
+
+ # Build the retired resource name.
+ # Note: Retired resources are tied to a Location, not a KeyRing.
+ # The name is like projects/{project}/locations/{location}/retiredResources/{id}
+ name = client.retired_resource_path(project_id, location_id, retired_resource_id)
+
+ # Call the API.
+ response = client.get_retired_resource(request={"name": name})
+
+ print(f"Got retired resource: {response.name}")
+ return response
+
+
+# [END kms_get_retired_resource]
diff --git a/kms/snippets/list_retired_resources.py b/kms/snippets/list_retired_resources.py
new file mode 100644
index 00000000000..9393b34de1c
--- /dev/null
+++ b/kms/snippets/list_retired_resources.py
@@ -0,0 +1,50 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START kms_list_retired_resources]
+from typing import List
+
+from google.cloud import kms
+
+
+def list_retired_resources(project_id: str, location_id: str) -> List[kms.RetiredResource]:
+ """
+ List the retired resources in a location.
+
+ Args:
+ project_id (str): Google Cloud project ID (e.g. 'my-project').
+ location_id (str): Cloud KMS location (e.g. 'us-east1').
+
+ Returns:
+        List[kms.RetiredResource]: The list of retired resources.
+ """
+
+ # Create the client.
+ client = kms.KeyManagementServiceClient()
+
+ # Build the parent location name.
+ parent = client.common_location_path(project_id, location_id)
+
+ # Call the API.
+ # The API paginates, but the Python client library handles that for us.
+ resources_list = list(client.list_retired_resources(request={"parent": parent}))
+
+ # Iterate over the resources and print them.
+ for resource in resources_list:
+ print(f"Retired resource: {resource.name}")
+
+ return resources_list
+
+
+# [END kms_list_retired_resources]
diff --git a/kms/snippets/requirements.txt b/kms/snippets/requirements.txt
index 6e15391cfd6..167c2a25011 100644
--- a/kms/snippets/requirements.txt
+++ b/kms/snippets/requirements.txt
@@ -1,4 +1,4 @@
-google-cloud-kms==3.2.1
+google-cloud-kms==3.11.0
cryptography==45.0.1
crcmod==1.7
jwcrypto==1.5.6
\ No newline at end of file
diff --git a/kms/snippets/snippets_test.py b/kms/snippets/snippets_test.py
index 970cf13dfe6..002ab499269 100644
--- a/kms/snippets/snippets_test.py
+++ b/kms/snippets/snippets_test.py
@@ -52,6 +52,7 @@
from create_key_version import create_key_version
from decrypt_asymmetric import decrypt_asymmetric
from decrypt_symmetric import decrypt_symmetric
+from delete_key import delete_key
from destroy_key_version import destroy_key_version
from disable_key_version import disable_key_version
from enable_key_version import enable_key_version
@@ -62,10 +63,12 @@
from get_key_version_attestation import get_key_version_attestation
from get_public_key import get_public_key
from get_public_key_jwk import get_public_key_jwk
+from get_retired_resource import get_retired_resource
from iam_add_member import iam_add_member
from iam_get_policy import iam_get_policy
from iam_remove_member import iam_remove_member
from import_manually_wrapped_key import import_manually_wrapped_key
+from list_retired_resources import list_retired_resources
from quickstart import quickstart
from restore_key_version import restore_key_version
from sign_asymmetric import sign_asymmetric
@@ -886,3 +889,41 @@ def test_verify_mac(
def test_quickstart(project_id: str, location_id: str) -> None:
key_rings = quickstart(project_id, location_id)
assert key_rings
+
+
+def test_delete_key_and_retired_resources(
+ client: kms.KeyManagementServiceClient,
+ project_id: str,
+ location_id: str,
+ key_ring_id: str,
+) -> None:
+ # We can test key deletion and retired resources by first creating a key.
+ key_id = f"delete-key-{uuid.uuid4()}"
+ key_ring_name = client.key_ring_path(project_id, location_id, key_ring_id)
+ key = client.create_crypto_key(
+ request={
+ "parent": key_ring_name,
+ "crypto_key_id": key_id,
+ "crypto_key": {
+ "purpose": kms.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT,
+ },
+ "skip_initial_version_creation": True,
+ }
+ )
+
+ # Delete the key.
+ delete_key(project_id, location_id, key_ring_id, key_id)
+
+ # List retired resources and filter to just our deleted key.
+ all_retired = list_retired_resources(project_id, location_id)
+ filtered_retired = [r for r in all_retired if r.original_resource == key.name]
+
+    # Exactly one retired resource should correspond to the deleted key.
+ assert len(filtered_retired) == 1
+
+ # Get the retired resource
+ resource_id = filtered_retired[0].name.split("/")[-1]
+ retrieved = get_retired_resource(project_id, location_id, resource_id)
+
+    # Verify the fetched resource matches the one found in the listing.
+ assert retrieved.name == filtered_retired[0].name
diff --git a/kubernetes_engine/django_tutorial/requirements.txt b/kubernetes_engine/django_tutorial/requirements.txt
index acedc1efebb..df3b50126a0 100644
--- a/kubernetes_engine/django_tutorial/requirements.txt
+++ b/kubernetes_engine/django_tutorial/requirements.txt
@@ -1,5 +1,4 @@
-Django==5.2.3; python_version >= "3.10"
-Django==4.2.23; python_version >= "3.8" and python_version < "3.10"```
+Django==6.0.1; python_version >= "3.12"
# Uncomment the mysqlclient requirement if you are using MySQL rather than
# PostgreSQL. You must also have a MySQL client installed in that case.
#mysqlclient==1.4.1
@@ -7,4 +6,4 @@ wheel==0.40.0
gunicorn==23.0.0; python_version > '3.0'
gunicorn==23.0.0; python_version < '3.0'
# psycopg2==2.8.4 # uncomment if you prefer to build from source
-psycopg2-binary==2.9.10
+psycopg2-binary==2.9.11
diff --git a/language/snippets/generated-samples/v1/language_sentiment_text.py b/language/snippets/generated-samples/v1/language_sentiment_text.py
deleted file mode 100644
index 81b738f1395..00000000000
--- a/language/snippets/generated-samples/v1/language_sentiment_text.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# DO NOT EDIT! This is a generated sample ("Request", "analyze_sentiment")
-
-# To install the latest published package dependency, execute the following:
-# pip install google-cloud-language
-
-import sys
-
-# isort: split
-# [START language_sentiment_text]
-
-from google.cloud import language_v1
-
-
-def sample_analyze_sentiment(content):
- client = language_v1.LanguageServiceClient()
-
- # content = 'Your text to analyze, e.g. Hello, world!'
-
- if isinstance(content, bytes):
- content = content.decode("utf-8")
-
- type_ = language_v1.Document.Type.PLAIN_TEXT
- document = {"type_": type_, "content": content}
-
- response = client.analyze_sentiment(request={"document": document})
- sentiment = response.document_sentiment
- print(f"Score: {sentiment.score}")
- print(f"Magnitude: {sentiment.magnitude}")
-
-
-# [END language_sentiment_text]
-
-
-def main():
- # FIXME: Convert argv from strings to the correct types.
- sample_analyze_sentiment(*sys.argv[1:])
-
-
-if __name__ == "__main__":
- main()
diff --git a/language/snippets/generated-samples/v1/requirements-test.txt b/language/snippets/generated-samples/v1/requirements-test.txt
deleted file mode 100644
index 15d066af319..00000000000
--- a/language/snippets/generated-samples/v1/requirements-test.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest==8.2.0
diff --git a/language/snippets/generated-samples/v1/requirements.txt b/language/snippets/generated-samples/v1/requirements.txt
deleted file mode 100644
index b432a6e4238..00000000000
--- a/language/snippets/generated-samples/v1/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-google-cloud-language==2.15.1
diff --git a/logging/cloud-client/README.rst b/logging/cloud-client/README.rst
deleted file mode 100644
index 4ddc91a754f..00000000000
--- a/logging/cloud-client/README.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-These samples have been moved.
-
-https://github.com/googleapis/python-logging/tree/main/samples
diff --git a/logging/redaction/Dockerfile b/logging/redaction/Dockerfile
index 3d8649357ed..c108cec3dd0 100644
--- a/logging/redaction/Dockerfile
+++ b/logging/redaction/Dockerfile
@@ -1,5 +1,4 @@
-# From apache/beam_python3.9_sdk:2.43.0
-FROM apache/beam_python3.9_sdk@sha256:0cb6eceed3652d01dd5a555fd9ff4eff5df62161dd99ad53fe591858bdb57741
+FROM apache/beam_python3.9_sdk@sha256:246c4b813c6de8c240b49ed03c426f413f1768321a3c441413031396a08912f9
# Install google-cloud-logging package that is missing in Beam SDK
COPY requirements.txt /tmp
diff --git a/logging/samples/AUTHORING_GUIDE.md b/logging/samples/AUTHORING_GUIDE.md
new file mode 100644
index 00000000000..8249522ffc2
--- /dev/null
+++ b/logging/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/logging/samples/CONTRIBUTING.md b/logging/samples/CONTRIBUTING.md
new file mode 100644
index 00000000000..f5fe2e6baf1
--- /dev/null
+++ b/logging/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md
\ No newline at end of file
diff --git a/logging/samples/snippets/README.rst.in b/logging/samples/snippets/README.rst.in
new file mode 100644
index 00000000000..ff243c1ce81
--- /dev/null
+++ b/logging/samples/snippets/README.rst.in
@@ -0,0 +1,28 @@
+# This file is used to generate README.rst
+
+product:
+ name: Cloud Logging
+ short_name: Cloud Logging
+ url: https://cloud.google.com/logging/docs
+ description: >
+ `Cloud Logging`_ allows you to store, search, analyze, monitor,
+ and alert on log data and events from Google Cloud Platform and Amazon
+ Web Services.
+
+setup:
+- auth
+- install_deps
+
+samples:
+- name: Quickstart
+ file: quickstart.py
+- name: Snippets
+ file: snippets.py
+ show_help: true
+- name: Export
+ file: export.py
+ show_help: true
+
+cloud_client_library: true
+
+folder: logging/samples/snippets
\ No newline at end of file
diff --git a/logging/samples/snippets/export.py b/logging/samples/snippets/export.py
new file mode 100644
index 00000000000..9a0673ee72d
--- /dev/null
+++ b/logging/samples/snippets/export.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+from google.cloud import logging
+
+
+# [START logging_list_sinks]
+def list_sinks():
+ """Lists all sinks."""
+ logging_client = logging.Client()
+
+ sinks = list(logging_client.list_sinks())
+
+ if not sinks:
+ print("No sinks.")
+
+ for sink in sinks:
+ print("{}: {} -> {}".format(sink.name, sink.filter_, sink.destination))
+
+
+# [END logging_list_sinks]
+
+
+# [START logging_create_sink]
+def create_sink(sink_name, destination_bucket, filter_):
+ """Creates a sink to export logs to the given Cloud Storage bucket.
+
+ The filter determines which logs this sink matches and will be exported
+ to the destination. For example a filter of 'severity>=INFO' will send
+ all logs that have a severity of INFO or greater to the destination.
+ See https://cloud.google.com/logging/docs/view/advanced_filters for more
+ filter information.
+ """
+ logging_client = logging.Client()
+
+ # The destination can be a Cloud Storage bucket, a Cloud Pub/Sub topic,
+ # or a BigQuery dataset. In this case, it is a Cloud Storage Bucket.
+ # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for
+ # information on the destination format.
+ destination = "storage.googleapis.com/{bucket}".format(bucket=destination_bucket)
+
+ sink = logging_client.sink(sink_name, filter_=filter_, destination=destination)
+
+ if sink.exists():
+ print("Sink {} already exists.".format(sink.name))
+ return
+
+ sink.create()
+ print("Created sink {}".format(sink.name))
+
+
+# [END logging_create_sink]
+
+
+# [START logging_update_sink]
+def update_sink(sink_name, filter_):
+ """Changes a sink's filter.
+
+ The filter determines which logs this sink matches and will be exported
+ to the destination. For example a filter of 'severity>=INFO' will send
+ all logs that have a severity of INFO or greater to the destination.
+ See https://cloud.google.com/logging/docs/view/advanced_filters for more
+ filter information.
+ """
+ logging_client = logging.Client()
+ sink = logging_client.sink(sink_name)
+
+ sink.reload()
+
+ sink.filter_ = filter_
+ print("Updated sink {}".format(sink.name))
+ sink.update()
+
+
+# [END logging_update_sink]
+
+
+# [START logging_delete_sink]
+def delete_sink(sink_name):
+ """Deletes a sink."""
+ logging_client = logging.Client()
+ sink = logging_client.sink(sink_name)
+
+ sink.delete()
+
+ print("Deleted sink {}".format(sink.name))
+
+
+# [END logging_delete_sink]
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ subparsers = parser.add_subparsers(dest="command")
+ subparsers.add_parser("list", help=list_sinks.__doc__)
+
+ create_parser = subparsers.add_parser("create", help=create_sink.__doc__)
+ create_parser.add_argument("sink_name", help="Name of the log export sink.")
+ create_parser.add_argument(
+ "destination_bucket", help="Cloud Storage bucket where logs will be exported."
+ )
+ create_parser.add_argument("filter", help="The filter used to match logs.")
+
+ update_parser = subparsers.add_parser("update", help=update_sink.__doc__)
+ update_parser.add_argument("sink_name", help="Name of the log export sink.")
+ update_parser.add_argument("filter", help="The filter used to match logs.")
+
+ delete_parser = subparsers.add_parser("delete", help=delete_sink.__doc__)
+ delete_parser.add_argument("sink_name", help="Name of the log export sink.")
+
+ args = parser.parse_args()
+
+ if args.command == "list":
+ list_sinks()
+ elif args.command == "create":
+ create_sink(args.sink_name, args.destination_bucket, args.filter)
+ elif args.command == "update":
+ update_sink(args.sink_name, args.filter)
+ elif args.command == "delete":
+ delete_sink(args.sink_name)
diff --git a/logging/samples/snippets/export_test.py b/logging/samples/snippets/export_test.py
new file mode 100644
index 00000000000..e7dacd49ee4
--- /dev/null
+++ b/logging/samples/snippets/export_test.py
@@ -0,0 +1,135 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import random
+import re
+import string
+import time
+
+import backoff
+from google.cloud import logging, storage
+import pytest
+
+import export
+
+
+BUCKET = os.environ["CLOUD_STORAGE_BUCKET"]
+TEST_SINK_NAME_TMPL = "example_sink_{}_{}"
+TEST_SINK_FILTER = "severity>=CRITICAL"
+TIMESTAMP = int(time.time())
+
+# Threshold beyond which the cleanup_old_sinks fixture will delete
+# old sink, in seconds
+CLEANUP_THRESHOLD = 7200 # 2 hours
+
+# Max buckets to delete at a time, to mitigate operation timeout
+# issues. To turn off in the future, set to None.
+MAX_BUCKETS = 1500
+
+
+def _random_id():
+ return "".join(
+ random.choice(string.ascii_uppercase + string.digits) for _ in range(6)
+ )
+
+
+def _create_sink_name():
+ return TEST_SINK_NAME_TMPL.format(TIMESTAMP, _random_id())
+
+
+@backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False)
+def _delete_object(obj, **kwargs):
+ obj.delete(**kwargs)
+
+
+# Runs once for entire test suite
+@pytest.fixture(scope="module")
+def cleanup_old_sinks():
+ client = logging.Client()
+ test_sink_name_regex = (
+ r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$"
+ )
+ for sink in client.list_sinks():
+ match = re.match(test_sink_name_regex, sink.name)
+ if match:
+ sink_timestamp = int(match.group(1))
+ if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD:
+ _delete_object(sink)
+
+ storage_client = storage.Client()
+
+ # See _sink_storage_setup in usage_guide.py for details about how
+ # sinks are named.
+ test_bucket_name_regex = r"^sink\-storage\-(\d+)$"
+ for bucket in storage_client.list_buckets(max_results=MAX_BUCKETS):
+ match = re.match(test_bucket_name_regex, bucket.name)
+ if match:
+ # Bucket timestamp is int(time.time() * 1000)
+ bucket_timestamp = int(match.group(1))
+ if TIMESTAMP - bucket_timestamp // 1000 > CLEANUP_THRESHOLD:
+ _delete_object(bucket, force=True)
+
+
+@pytest.fixture
+def example_sink(cleanup_old_sinks):
+ client = logging.Client()
+
+ sink = client.sink(
+ _create_sink_name(),
+ filter_=TEST_SINK_FILTER,
+ destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET),
+ )
+
+ sink.create()
+
+ yield sink
+
+ _delete_object(sink)
+
+
+def test_list(example_sink, capsys):
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=60)
+ def eventually_consistent_test():
+ export.list_sinks()
+ out, _ = capsys.readouterr()
+ assert example_sink.name in out
+
+ eventually_consistent_test()
+
+
+def test_create(capsys):
+ sink_name = _create_sink_name()
+
+ try:
+ export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER)
+ # Clean-up the temporary sink.
+ finally:
+ _delete_object(logging.Client().sink(sink_name))
+
+ out, _ = capsys.readouterr()
+ assert sink_name in out
+
+
+def test_update(example_sink, capsys):
+ updated_filter = "severity>=INFO"
+ export.update_sink(example_sink.name, updated_filter)
+
+ example_sink.reload()
+ assert example_sink.filter_ == updated_filter
+
+
+def test_delete(example_sink, capsys):
+ export.delete_sink(example_sink.name)
+ assert not example_sink.exists()
diff --git a/logging/samples/snippets/handler.py b/logging/samples/snippets/handler.py
new file mode 100644
index 00000000000..49d2578984f
--- /dev/null
+++ b/logging/samples/snippets/handler.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def use_logging_handler():
+ # [START logging_stdlogging]
+ # [START logging_handler_setup]
+ # Imports the Cloud Logging client library
+ import google.cloud.logging
+
+ # Instantiates a client
+ client = google.cloud.logging.Client()
+
+ # Retrieves a Cloud Logging handler based on the environment
+ # you're running in and integrates the handler with the
+ # Python logging module. By default this captures all logs
+ # at INFO level and higher
+ client.setup_logging()
+ # [END logging_handler_setup]
+
+ # [START logging_handler_usage]
+ # Imports Python standard library logging
+ import logging
+
+ # The data to log
+ text = "Hello, world!"
+
+ # Emits the data using the standard logging module
+ logging.warning(text)
+ # [END logging_handler_usage]
+
+ print("Logged: {}".format(text))
+ # [END logging_stdlogging]
+
+
+if __name__ == "__main__":
+ use_logging_handler()
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/admin.py b/logging/samples/snippets/handler_test.py
similarity index 74%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/polls/admin.py
rename to logging/samples/snippets/handler_test.py
index 5fc6d71455b..9d635806ae1 100644
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/admin.py
+++ b/logging/samples/snippets/handler_test.py
@@ -1,4 +1,4 @@
-# Copyright 2015 Google LLC.
+# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from django.contrib import admin
-from .models import Question
+import handler
-admin.site.register(Question)
+
+def test_handler(capsys):
+ handler.use_logging_handler()
+ out, _ = capsys.readouterr()
+ assert "Logged" in out
diff --git a/logging/samples/snippets/quickstart.py b/logging/samples/snippets/quickstart.py
new file mode 100644
index 00000000000..7c38ea6fa82
--- /dev/null
+++ b/logging/samples/snippets/quickstart.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def run_quickstart():
+ # [START logging_quickstart]
+ # Imports the Google Cloud client library
+ from google.cloud import logging
+
+ # Instantiates a client
+ logging_client = logging.Client()
+
+ # The name of the log to write to
+ log_name = "my-log"
+ # Selects the log to write to
+ logger = logging_client.logger(log_name)
+
+ # The data to log
+ text = "Hello, world!"
+
+ # Writes the log entry
+ logger.log_text(text)
+
+ print("Logged: {}".format(text))
+ # [END logging_quickstart]
+
+
+if __name__ == "__main__":
+ run_quickstart()
diff --git a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/views.py b/logging/samples/snippets/quickstart_test.py
similarity index 74%
rename from appengine/flexible_python37_and_earlier/django_cloudsql/polls/views.py
rename to logging/samples/snippets/quickstart_test.py
index 262f571d568..d8ace2cbcf3 100644
--- a/appengine/flexible_python37_and_earlier/django_cloudsql/polls/views.py
+++ b/logging/samples/snippets/quickstart_test.py
@@ -1,4 +1,4 @@
-# Copyright 2015 Google LLC.
+# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from django.http import HttpResponse
+import quickstart
-def index(request):
- return HttpResponse("Hello, world. You're at the polls index.")
+
+def test_quickstart(capsys):
+ quickstart.run_quickstart()
+ out, _ = capsys.readouterr()
+ assert "Logged" in out
diff --git a/logging/samples/snippets/requirements-test.txt b/logging/samples/snippets/requirements-test.txt
new file mode 100644
index 00000000000..37eb1f9aa7a
--- /dev/null
+++ b/logging/samples/snippets/requirements-test.txt
@@ -0,0 +1,3 @@
+backoff==2.2.1
+pytest===7.4.4; python_version == '3.7'
+pytest==8.2.2; python_version >= '3.8'
diff --git a/logging/samples/snippets/requirements.txt b/logging/samples/snippets/requirements.txt
new file mode 100644
index 00000000000..65b84840d38
--- /dev/null
+++ b/logging/samples/snippets/requirements.txt
@@ -0,0 +1,4 @@
+google-cloud-logging==3.13.0
+google-cloud-bigquery==3.40.1
+google-cloud-storage==3.7.0
+google-cloud-pubsub==2.35.0
diff --git a/logging/samples/snippets/snippets.py b/logging/samples/snippets/snippets.py
new file mode 100644
index 00000000000..f6c16d17e38
--- /dev/null
+++ b/logging/samples/snippets/snippets.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to perform basic operations on logs and
+log entries with Cloud Logging.
+
+For more information, see the README.md under /logging and the
+documentation at https://cloud.google.com/logging/docs.
+"""
+
+import argparse
+
+from google.cloud import logging
+
+
+# [START logging_write_log_entry]
+def write_entry(logger_name):
+ """Writes log entries to the given logger."""
+ logging_client = logging.Client()
+
+ # This log can be found in the Cloud Logging console under 'Custom Logs'.
+ logger = logging_client.logger(logger_name)
+
+ # Make a simple text log
+ logger.log_text("Hello, world!")
+
+ # Simple text log with severity.
+ logger.log_text("Goodbye, world!", severity="WARNING")
+
+ # Struct log. The struct can be any JSON-serializable dictionary.
+ logger.log_struct(
+ {
+ "name": "King Arthur",
+ "quest": "Find the Holy Grail",
+ "favorite_color": "Blue",
+ },
+ severity="INFO",
+ )
+
+ print("Wrote logs to {}.".format(logger.name))
+
+
+# [END logging_write_log_entry]
+
+
+# [START logging_list_log_entries]
+def list_entries(logger_name):
+ """Lists the most recent entries for a given logger."""
+ logging_client = logging.Client()
+ logger = logging_client.logger(logger_name)
+
+ print("Listing entries for logger {}:".format(logger.name))
+
+ for entry in logger.list_entries():
+ timestamp = entry.timestamp.isoformat()
+ print("* {}: {}".format(timestamp, entry.payload))
+
+
+# [END logging_list_log_entries]
+
+
+# [START logging_delete_log]
+def delete_logger(logger_name):
+ """Deletes a logger and all its entries.
+
+ Note that a deletion can take several minutes to take effect.
+ """
+ logging_client = logging.Client()
+ logger = logging_client.logger(logger_name)
+
+ logger.delete()
+
+ print("Deleted all logging entries for {}".format(logger.name))
+
+
+# [END logging_delete_log]
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+ parser.add_argument("logger_name", help="Logger name", default="example_log")
+ subparsers = parser.add_subparsers(dest="command")
+ subparsers.add_parser("list", help=list_entries.__doc__)
+ subparsers.add_parser("write", help=write_entry.__doc__)
+ subparsers.add_parser("delete", help=delete_logger.__doc__)
+
+ args = parser.parse_args()
+
+ if args.command == "list":
+ list_entries(args.logger_name)
+ elif args.command == "write":
+ write_entry(args.logger_name)
+ elif args.command == "delete":
+ delete_logger(args.logger_name)
diff --git a/logging/samples/snippets/snippets_test.py b/logging/samples/snippets/snippets_test.py
new file mode 100644
index 00000000000..5cddc92d313
--- /dev/null
+++ b/logging/samples/snippets/snippets_test.py
@@ -0,0 +1,68 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+
+import backoff
+from google.api_core.exceptions import NotFound
+from google.cloud import logging
+import pytest
+
+import snippets
+
+
+TEST_LOGGER_NAME = "example_log_{}".format(uuid.uuid4().hex)
+TEST_TEXT = "Hello, world."
+
+
+@pytest.fixture
+def example_log():
+ client = logging.Client()
+ logger = client.logger(TEST_LOGGER_NAME)
+ text = "Hello, world."
+ logger.log_text(text)
+ return text
+
+
+def test_list(example_log, capsys):
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=120)
+ def eventually_consistent_test():
+ snippets.list_entries(TEST_LOGGER_NAME)
+ out, _ = capsys.readouterr()
+ assert example_log in out
+
+ eventually_consistent_test()
+
+
+def test_write(capsys):
+
+ snippets.write_entry(TEST_LOGGER_NAME)
+
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=120)
+ def eventually_consistent_test():
+ snippets.list_entries(TEST_LOGGER_NAME)
+ out, _ = capsys.readouterr()
+ assert TEST_TEXT in out
+
+ eventually_consistent_test()
+
+
+def test_delete(example_log, capsys):
+ @backoff.on_exception(backoff.expo, NotFound, max_time=120)
+ def eventually_consistent_test():
+ snippets.delete_logger(TEST_LOGGER_NAME)
+ out, _ = capsys.readouterr()
+ assert TEST_LOGGER_NAME in out
+
+ eventually_consistent_test()
diff --git a/managedkafka/snippets/connect/clusters/clusters_test.py b/managedkafka/snippets/connect/clusters/clusters_test.py
new file mode 100644
index 00000000000..bb3b7295428
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/clusters_test.py
@@ -0,0 +1,176 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from unittest import mock
+from unittest.mock import MagicMock
+
+from google.api_core.operation import Operation
+from google.cloud import managedkafka_v1
+import pytest
+
+import create_connect_cluster # noqa: I100
+import delete_connect_cluster
+import get_connect_cluster
+import list_connect_clusters
+import update_connect_cluster
+
+PROJECT_ID = "test-project-id"
+REGION = "us-central1"
+KAFKA_CLUSTER_ID = "test-cluster-id"
+CONNECT_CLUSTER_ID = "test-connect-cluster-id"
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connect_cluster"
+)
+def test_create_connect_cluster(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ cpu = 12
+ memory_bytes = 12884901900 # 12 GB
+ primary_subnet = "test-subnet"
+ operation = mock.MagicMock(spec=Operation)
+ connect_cluster = managedkafka_v1.types.ConnectCluster()
+ connect_cluster.name = (
+ managedkafka_v1.ManagedKafkaConnectClient.connect_cluster_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID
+ )
+ )
+ operation.result = mock.MagicMock(return_value=connect_cluster)
+ mock_method.return_value = operation
+
+ create_connect_cluster.create_connect_cluster(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ kafka_cluster_id=KAFKA_CLUSTER_ID,
+ primary_subnet=primary_subnet,
+ cpu=cpu,
+ memory_bytes=memory_bytes,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connect cluster" in out
+ assert CONNECT_CLUSTER_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.get_connect_cluster"
+)
+def test_get_connect_cluster(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connect_cluster = managedkafka_v1.types.ConnectCluster()
+ connect_cluster.name = (
+ managedkafka_v1.ManagedKafkaConnectClient.connect_cluster_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID
+ )
+ )
+ mock_method.return_value = connect_cluster
+
+ get_connect_cluster.get_connect_cluster(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Got Connect cluster" in out
+ assert CONNECT_CLUSTER_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.update_connect_cluster"
+)
+def test_update_connect_cluster(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ new_memory_bytes = 12884901900 # 12 GB
+ operation = mock.MagicMock(spec=Operation)
+ connect_cluster = managedkafka_v1.types.ConnectCluster()
+ connect_cluster.name = (
+ managedkafka_v1.ManagedKafkaConnectClient.connect_cluster_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID
+ )
+ )
+ connect_cluster.capacity_config.memory_bytes = new_memory_bytes
+ operation.result = mock.MagicMock(return_value=connect_cluster)
+ mock_method.return_value = operation
+
+ update_connect_cluster.update_connect_cluster(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ memory_bytes=new_memory_bytes,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Updated Connect cluster" in out
+ assert CONNECT_CLUSTER_ID in out
+ assert str(new_memory_bytes) in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.list_connect_clusters"
+)
+def test_list_connect_clusters(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connect_cluster = managedkafka_v1.types.ConnectCluster()
+ connect_cluster.name = (
+ managedkafka_v1.ManagedKafkaConnectClient.connect_cluster_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID
+ )
+ )
+
+ response = [connect_cluster]
+ mock_method.return_value = response
+
+ list_connect_clusters.list_connect_clusters(
+ project_id=PROJECT_ID,
+ region=REGION,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Got Connect cluster" in out
+ assert CONNECT_CLUSTER_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.delete_connect_cluster"
+)
+def test_delete_connect_cluster(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ mock_method.return_value = operation
+
+ delete_connect_cluster.delete_connect_cluster(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Deleted Connect cluster" in out
+ mock_method.assert_called_once()
diff --git a/managedkafka/snippets/connect/clusters/create_connect_cluster.py b/managedkafka/snippets/connect/clusters/create_connect_cluster.py
new file mode 100644
index 00000000000..c3045ed84d1
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/create_connect_cluster.py
@@ -0,0 +1,93 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_connect_cluster(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ kafka_cluster_id: str,
+ primary_subnet: str,
+ cpu: int,
+ memory_bytes: int,
+) -> None:
+ """
+ Create a Kafka Connect cluster.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ kafka_cluster_id: The ID of the primary Managed Service for Apache Kafka cluster.
+ primary_subnet: The primary VPC subnet for the Connect cluster workers. The expected format is projects/{project_id}/regions/{region}/subnetworks/{subnet_id}.
+ cpu: Number of vCPUs to provision for the cluster. The minimum is 12.
+ memory_bytes: The memory to provision for the cluster in bytes. Must be between 1 GiB * cpu and 8 GiB * cpu.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors or
+ the timeout before the operation completes is reached.
+ """
+ # [START managedkafka_create_connect_cluster]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud import managedkafka_v1
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import ManagedKafkaConnectClient
+ from google.cloud.managedkafka_v1.types import ConnectCluster, CreateConnectClusterRequest, ConnectNetworkConfig
+
+ # TODO(developer): Update with your values.
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # kafka_cluster_id = "my-kafka-cluster"
+ # primary_subnet = "projects/my-project-id/regions/us-central1/subnetworks/default"
+ # cpu = 12
+ # memory_bytes = 12884901888 # 12 GiB
+
+ connect_client = ManagedKafkaConnectClient()
+ kafka_client = managedkafka_v1.ManagedKafkaClient()
+
+ parent = connect_client.common_location_path(project_id, region)
+ kafka_cluster_path = kafka_client.cluster_path(project_id, region, kafka_cluster_id)
+
+ connect_cluster = ConnectCluster()
+ connect_cluster.name = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+ connect_cluster.kafka_cluster = kafka_cluster_path
+ connect_cluster.capacity_config.vcpu_count = cpu
+ connect_cluster.capacity_config.memory_bytes = memory_bytes
+ connect_cluster.gcp_config.access_config.network_configs = [ConnectNetworkConfig(primary_subnet=primary_subnet)]
+ # Optionally, you can also specify accessible subnets and resolvable DNS domains as part of your network configuration.
+ # For example:
+ # connect_cluster.gcp_config.access_config.network_configs = [
+ # ConnectNetworkConfig(
+ # primary_subnet=primary_subnet,
+ # additional_subnets=additional_subnets,
+ # dns_domain_names=dns_domain_names,
+ # )
+ # ]
+
+ request = CreateConnectClusterRequest(
+ parent=parent,
+ connect_cluster_id=connect_cluster_id,
+ connect_cluster=connect_cluster,
+ )
+
+ try:
+ operation = connect_client.create_connect_cluster(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ # Creating a Connect cluster can take 10-40 minutes.
+ response = operation.result(timeout=3000)
+ print("Created Connect cluster:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+
+ # [END managedkafka_create_connect_cluster]
diff --git a/managedkafka/snippets/connect/clusters/delete_connect_cluster.py b/managedkafka/snippets/connect/clusters/delete_connect_cluster.py
new file mode 100644
index 00000000000..01e27875a20
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/delete_connect_cluster.py
@@ -0,0 +1,58 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def delete_connect_cluster(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+) -> None:
+ """
+ Delete a Kafka Connect cluster.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_delete_connect_cluster]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.DeleteConnectClusterRequest(
+ name=connect_client.connect_cluster_path(project_id, region, connect_cluster_id),
+ )
+
+ try:
+ operation = connect_client.delete_connect_cluster(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print("Deleted Connect cluster")
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+
+ # [END managedkafka_delete_connect_cluster]
diff --git a/managedkafka/snippets/connect/clusters/get_connect_cluster.py b/managedkafka/snippets/connect/clusters/get_connect_cluster.py
new file mode 100644
index 00000000000..8dfd39b5958
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/get_connect_cluster.py
@@ -0,0 +1,55 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def get_connect_cluster(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+) -> None:
+ """
+ Get a Kafka Connect cluster.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+
+ Raises:
+ This method will raise the NotFound exception if the Connect cluster is not found.
+ """
+ # [START managedkafka_get_connect_cluster]
+ from google.api_core.exceptions import NotFound
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import ManagedKafkaConnectClient
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+
+ client = ManagedKafkaConnectClient()
+
+ cluster_path = client.connect_cluster_path(project_id, region, connect_cluster_id)
+ request = managedkafka_v1.GetConnectClusterRequest(
+ name=cluster_path,
+ )
+
+ try:
+ cluster = client.get_connect_cluster(request=request)
+ print("Got Connect cluster:", cluster)
+ except NotFound as e:
+ print(f"Failed to get Connect cluster {connect_cluster_id} with error: {e}")
+
+ # [END managedkafka_get_connect_cluster]
diff --git a/managedkafka/snippets/connect/clusters/list_connect_clusters.py b/managedkafka/snippets/connect/clusters/list_connect_clusters.py
new file mode 100644
index 00000000000..749a5267d91
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/list_connect_clusters.py
@@ -0,0 +1,51 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def list_connect_clusters(
+ project_id: str,
+ region: str,
+) -> None:
+ """
+ List Kafka Connect clusters in a given project ID and region.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ """
+ # [START managedkafka_list_connect_clusters]
+ from google.cloud import managedkafka_v1
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.api_core.exceptions import GoogleAPICallError
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.ListConnectClustersRequest(
+ parent=connect_client.common_location_path(project_id, region),
+ )
+
+    try:
+        response = connect_client.list_connect_clusters(request=request)
+        for cluster in response:
+            print("Got Connect cluster:", cluster)
+    except GoogleAPICallError as e:
+        print(f"Failed to list Connect clusters with error: {e}")
+
+ # [END managedkafka_list_connect_clusters]
diff --git a/managedkafka/snippets/connect/clusters/requirements.txt b/managedkafka/snippets/connect/clusters/requirements.txt
new file mode 100644
index 00000000000..5f372e81c41
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/requirements.txt
@@ -0,0 +1,6 @@
+protobuf==5.29.4
+pytest==8.2.2
+google-api-core==2.23.0
+google-auth==2.38.0
+google-cloud-managedkafka==0.1.12
+googleapis-common-protos==1.66.0
diff --git a/managedkafka/snippets/connect/clusters/update_connect_cluster.py b/managedkafka/snippets/connect/clusters/update_connect_cluster.py
new file mode 100644
index 00000000000..16587046949
--- /dev/null
+++ b/managedkafka/snippets/connect/clusters/update_connect_cluster.py
@@ -0,0 +1,72 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def update_connect_cluster(
+ project_id: str, region: str, connect_cluster_id: str, memory_bytes: int
+) -> None:
+ """
+ Update a Kafka Connect cluster.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ memory_bytes: The memory to provision for the cluster in bytes.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors or
+ the timeout before the operation completes is reached.
+ """
+ # [START managedkafka_update_connect_cluster]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud import managedkafka_v1
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import ConnectCluster
+ from google.protobuf import field_mask_pb2
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # memory_bytes = 4295000000
+
+ connect_client = ManagedKafkaConnectClient()
+
+ connect_cluster = ConnectCluster()
+ connect_cluster.name = connect_client.connect_cluster_path(
+ project_id, region, connect_cluster_id
+ )
+ connect_cluster.capacity_config.memory_bytes = memory_bytes
+ update_mask = field_mask_pb2.FieldMask()
+ update_mask.paths.append("capacity_config.memory_bytes")
+
+ # For a list of editable fields, one can check https://cloud.google.com/managed-service-for-apache-kafka/docs/connect-cluster/create-connect-cluster#properties.
+ request = managedkafka_v1.UpdateConnectClusterRequest(
+ update_mask=update_mask,
+ connect_cluster=connect_cluster,
+ )
+
+ try:
+ operation = connect_client.update_connect_cluster(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+        # Block until the long-running operation completes and capture the result.
+        response = operation.result()
+ print("Updated Connect cluster:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+
+ # [END managedkafka_update_connect_cluster]
diff --git a/managedkafka/snippets/connect/connectors/connectors_test.py b/managedkafka/snippets/connect/connectors/connectors_test.py
new file mode 100644
index 00000000000..ade860ae40d
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/connectors_test.py
@@ -0,0 +1,405 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from unittest import mock
+from unittest.mock import MagicMock
+
+import create_bigquery_sink_connector
+import create_cloud_storage_sink_connector
+import create_mirrormaker2_source_connector
+import create_pubsub_sink_connector
+import create_pubsub_source_connector
+import delete_connector
+import get_connector
+from google.api_core.operation import Operation
+from google.cloud import managedkafka_v1
+import list_connectors
+import pause_connector
+import pytest
+import restart_connector
+import resume_connector
+import stop_connector
+import update_connector
+
+
+PROJECT_ID = "test-project-id"
+REGION = "us-central1"
+CONNECT_CLUSTER_ID = "test-connect-cluster-id"
+CONNECTOR_ID = "test-connector-id"
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connector"
+)
+def test_create_mirrormaker2_source_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector_id = "mm2-source-to-target-connector-id"
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = connector_id
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ create_mirrormaker2_source_connector.create_mirrormaker2_source_connector(
+ PROJECT_ID,
+ REGION,
+ CONNECT_CLUSTER_ID,
+ connector_id,
+ "source_cluster_dns",
+ "target_cluster_dns",
+ "3",
+ "source",
+ "target",
+ ".*",
+ "mm2.*\\.internal,.*\\.replica,__.*",
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connector" in out
+ assert connector_id in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connector"
+)
+def test_create_pubsub_source_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector_id = "CPS_SOURCE_CONNECTOR_ID"
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = connector_id
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ create_pubsub_source_connector.create_pubsub_source_connector(
+ PROJECT_ID,
+ REGION,
+ CONNECT_CLUSTER_ID,
+ connector_id,
+ "GMK_TOPIC_ID",
+ "CPS_SUBSCRIPTION_ID",
+ "GCP_PROJECT_ID",
+ "3",
+ "org.apache.kafka.connect.converters.ByteArrayConverter",
+ "org.apache.kafka.connect.storage.StringConverter",
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connector" in out
+ assert connector_id in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connector"
+)
+def test_create_pubsub_sink_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector_id = "CPS_SINK_CONNECTOR_ID"
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = connector_id
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ create_pubsub_sink_connector.create_pubsub_sink_connector(
+ PROJECT_ID,
+ REGION,
+ CONNECT_CLUSTER_ID,
+ connector_id,
+ "GMK_TOPIC_ID",
+ "org.apache.kafka.connect.storage.StringConverter",
+ "org.apache.kafka.connect.storage.StringConverter",
+ "CPS_TOPIC_ID",
+ "GCP_PROJECT_ID",
+ "3",
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connector" in out
+ assert connector_id in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connector"
+)
+def test_create_cloud_storage_sink_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector_id = "GCS_SINK_CONNECTOR_ID"
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = connector_id
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ create_cloud_storage_sink_connector.create_cloud_storage_sink_connector(
+ PROJECT_ID,
+ REGION,
+ CONNECT_CLUSTER_ID,
+ connector_id,
+ "GMK_TOPIC_ID",
+ "GCS_BUCKET_NAME",
+ "3",
+ "json",
+ "org.apache.kafka.connect.json.JsonConverter",
+ "false",
+ "org.apache.kafka.connect.storage.StringConverter",
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connector" in out
+    assert connector_id in out
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.create_connector"
+)
+def test_create_bigquery_sink_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector_id = "BQ_SINK_CONNECTOR_ID"
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = connector_id
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ create_bigquery_sink_connector.create_bigquery_sink_connector(
+ PROJECT_ID,
+ REGION,
+ CONNECT_CLUSTER_ID,
+ connector_id,
+ "GMK_TOPIC_ID",
+ "3",
+ "org.apache.kafka.connect.storage.StringConverter",
+ "org.apache.kafka.connect.json.JsonConverter",
+ "false",
+ "BQ_DATASET_ID",
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Created Connector" in out
+ assert connector_id in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.list_connectors"
+)
+def test_list_connectors(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector = managedkafka_v1.types.Connector()
+ connector.name = managedkafka_v1.ManagedKafkaConnectClient.connector_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID, CONNECTOR_ID
+ )
+ mock_method.return_value = [connector]
+
+ list_connectors.list_connectors(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Got connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.get_connector"
+)
+def test_get_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ connector = managedkafka_v1.types.Connector()
+ connector.name = managedkafka_v1.ManagedKafkaConnectClient.connector_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID, CONNECTOR_ID
+ )
+ mock_method.return_value = connector
+
+ get_connector.get_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Got connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.update_connector"
+)
+def test_update_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ configs = {"tasks.max": "6", "value.converter.schemas.enable": "true"}
+ operation = mock.MagicMock(spec=Operation)
+ connector = managedkafka_v1.types.Connector()
+ connector.name = managedkafka_v1.ManagedKafkaConnectClient.connector_path(
+ PROJECT_ID, REGION, CONNECT_CLUSTER_ID, CONNECTOR_ID
+ )
+ operation.result = mock.MagicMock(return_value=connector)
+ mock_method.return_value = operation
+
+ update_connector.update_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ configs=configs,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Updated connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.delete_connector"
+)
+def test_delete_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ operation.result = mock.MagicMock(return_value=None)
+ mock_method.return_value = operation
+
+ delete_connector.delete_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Deleted connector" in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.pause_connector"
+)
+def test_pause_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ operation.result = mock.MagicMock(return_value=None)
+ mock_method.return_value = operation
+
+ pause_connector.pause_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Paused connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.resume_connector"
+)
+def test_resume_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ operation.result = mock.MagicMock(return_value=None)
+ mock_method.return_value = operation
+
+ resume_connector.resume_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Resumed connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.stop_connector"
+)
+def test_stop_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ operation.result = mock.MagicMock(return_value=None)
+ mock_method.return_value = operation
+
+ stop_connector.stop_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Stopped connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
+
+
+@mock.patch(
+ "google.cloud.managedkafka_v1.services.managed_kafka_connect.ManagedKafkaConnectClient.restart_connector"
+)
+def test_restart_connector(
+ mock_method: MagicMock,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ operation = mock.MagicMock(spec=Operation)
+ operation.result = mock.MagicMock(return_value=None)
+ mock_method.return_value = operation
+
+ restart_connector.restart_connector(
+ project_id=PROJECT_ID,
+ region=REGION,
+ connect_cluster_id=CONNECT_CLUSTER_ID,
+ connector_id=CONNECTOR_ID,
+ )
+
+ out, _ = capsys.readouterr()
+ assert "Restarted connector" in out
+ assert CONNECTOR_ID in out
+ mock_method.assert_called_once()
diff --git a/managedkafka/snippets/connect/connectors/create_bigquery_sink_connector.py b/managedkafka/snippets/connect/connectors/create_bigquery_sink_connector.py
new file mode 100644
index 00000000000..129872d66d3
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/create_bigquery_sink_connector.py
@@ -0,0 +1,98 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_bigquery_sink_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ topics: str,
+ tasks_max: str,
+ key_converter: str,
+ value_converter: str,
+ value_converter_schemas_enable: str,
+ default_dataset: str,
+) -> None:
+ """
+ Create a BigQuery Sink connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: Name of the connector.
+ topics: Kafka topics to read from.
+ tasks_max: Maximum number of tasks.
+ key_converter: Key converter class.
+ value_converter: Value converter class.
+ value_converter_schemas_enable: Enable schemas for value converter.
+ default_dataset: BigQuery dataset ID.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors or
+ the timeout before the operation completes is reached.
+ """
+ # TODO(developer): Update with your config values. Here is a sample configuration:
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "BQ_SINK_CONNECTOR_ID"
+ # topics = "GMK_TOPIC_ID"
+ # tasks_max = "3"
+ # key_converter = "org.apache.kafka.connect.storage.StringConverter"
+ # value_converter = "org.apache.kafka.connect.json.JsonConverter"
+ # value_converter_schemas_enable = "false"
+ # default_dataset = "BQ_DATASET_ID"
+
+ # [START managedkafka_create_bigquery_sink_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector, CreateConnectorRequest
+
+ connect_client = ManagedKafkaConnectClient()
+ parent = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+
+ configs = {
+ "name": connector_id,
+ "project": project_id,
+ "topics": topics,
+ "tasks.max": tasks_max,
+ "connector.class": "com.wepay.kafka.connect.bigquery.BigQuerySinkConnector",
+ "key.converter": key_converter,
+ "value.converter": value_converter,
+ "value.converter.schemas.enable": value_converter_schemas_enable,
+ "defaultDataset": default_dataset,
+ }
+
+ connector = Connector()
+ connector.name = connector_id
+ connector.configs = configs
+
+ request = CreateConnectorRequest(
+ parent=parent,
+ connector_id=connector_id,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.create_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Created Connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+ # [END managedkafka_create_bigquery_sink_connector]
diff --git a/managedkafka/snippets/connect/connectors/create_cloud_storage_sink_connector.py b/managedkafka/snippets/connect/connectors/create_cloud_storage_sink_connector.py
new file mode 100644
index 00000000000..8e6d7bc2c70
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/create_cloud_storage_sink_connector.py
@@ -0,0 +1,101 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def create_cloud_storage_sink_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ topics: str,
+ gcs_bucket_name: str,
+ tasks_max: str,
+ format_output_type: str,
+ value_converter: str,
+ value_converter_schemas_enable: str,
+ key_converter: str,
+) -> None:
+ """
+ Create a Cloud Storage Sink connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: Name of the connector.
+ topics: Kafka topics to read from.
+ gcs_bucket_name: Google Cloud Storage bucket name.
+ tasks_max: Maximum number of tasks.
+ format_output_type: Output format type.
+ value_converter: Value converter class.
+ value_converter_schemas_enable: Enable schemas for value converter.
+ key_converter: Key converter class.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors or
+ the timeout before the operation completes is reached.
+ """
+ # TODO(developer): Update with your config values. Here is a sample configuration:
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "GCS_SINK_CONNECTOR_ID"
+ # topics = "GMK_TOPIC_ID"
+ # gcs_bucket_name = "GCS_BUCKET_NAME"
+ # tasks_max = "3"
+ # format_output_type = "json"
+ # value_converter = "org.apache.kafka.connect.json.JsonConverter"
+ # value_converter_schemas_enable = "false"
+ # key_converter = "org.apache.kafka.connect.storage.StringConverter"
+
+ # [START managedkafka_create_cloud_storage_sink_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector, CreateConnectorRequest
+
+ connect_client = ManagedKafkaConnectClient()
+ parent = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+
+ configs = {
+ "connector.class": "io.aiven.kafka.connect.gcs.GcsSinkConnector",
+ "tasks.max": tasks_max,
+ "topics": topics,
+ "gcs.bucket.name": gcs_bucket_name,
+ "gcs.credentials.default": "true",
+ "format.output.type": format_output_type,
+ "name": connector_id,
+ "value.converter": value_converter,
+ "value.converter.schemas.enable": value_converter_schemas_enable,
+ "key.converter": key_converter,
+ }
+
+ connector = Connector()
+ connector.name = connector_id
+ connector.configs = configs
+
+ request = CreateConnectorRequest(
+ parent=parent,
+ connector_id=connector_id,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.create_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Created Connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+ # [END managedkafka_create_cloud_storage_sink_connector]
diff --git a/managedkafka/snippets/connect/connectors/create_mirrormaker2_source_connector.py b/managedkafka/snippets/connect/connectors/create_mirrormaker2_source_connector.py
new file mode 100644
index 00000000000..2252ac2c2fd
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/create_mirrormaker2_source_connector.py
@@ -0,0 +1,107 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_mirrormaker2_source_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ source_bootstrap_servers: str,
+ target_bootstrap_servers: str,
+ tasks_max: str,
+ source_cluster_alias: str,
+ target_cluster_alias: str,
+ topics: str,
+ topics_exclude: str,
+) -> None:
+ """
+ Create a MirrorMaker 2.0 Source connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: Name of the connector.
+ source_bootstrap_servers: Source cluster bootstrap servers.
+ target_bootstrap_servers: Target cluster bootstrap servers. This is usually the primary cluster.
+ tasks_max: Controls the level of parallelism for the connector.
+ source_cluster_alias: Alias for the source cluster.
+ target_cluster_alias: Alias for the target cluster.
+ topics: Topics to mirror.
+ topics_exclude: Topics to exclude from mirroring.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # TODO(developer): Update with your config values. Here is a sample configuration:
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "mm2-source-to-target-connector-id"
+ # source_bootstrap_servers = "source_cluster_dns"
+ # target_bootstrap_servers = "target_cluster_dns"
+ # tasks_max = "3"
+ # source_cluster_alias = "source"
+ # target_cluster_alias = "target"
+ # topics = ".*"
+ # topics_exclude = "mm2.*.internal,.*.replica,__.*"
+
+ # [START managedkafka_create_mirrormaker2_source_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector, CreateConnectorRequest
+
+ connect_client = ManagedKafkaConnectClient()
+ parent = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+
+ configs = {
+ "connector.class": "org.apache.kafka.connect.mirror.MirrorSourceConnector",
+ "name": connector_id,
+ "tasks.max": tasks_max,
+ "source.cluster.alias": source_cluster_alias,
+ "target.cluster.alias": target_cluster_alias, # This is usually the primary cluster.
+ # Replicate all topics from the source
+ "topics": topics,
+ # The value for bootstrap.servers is a hostname:port pair for the Kafka broker in
+ # the source/target cluster.
+ # For example: "kafka-broker:9092"
+ "source.cluster.bootstrap.servers": source_bootstrap_servers,
+ "target.cluster.bootstrap.servers": target_bootstrap_servers,
+ # You can define an exclusion policy for topics as follows:
+ # To exclude internal MirrorMaker 2 topics, internal topics and replicated topics.
+ "topics.exclude": topics_exclude,
+ }
+
+ connector = Connector()
+ # The name of the connector.
+ connector.name = connector_id
+ connector.configs = configs
+
+ request = CreateConnectorRequest(
+ parent=parent,
+ connector_id=connector_id,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.create_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Created Connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+ # [END managedkafka_create_mirrormaker2_source_connector]
diff --git a/managedkafka/snippets/connect/connectors/create_pubsub_sink_connector.py b/managedkafka/snippets/connect/connectors/create_pubsub_sink_connector.py
new file mode 100644
index 00000000000..7f455059a84
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/create_pubsub_sink_connector.py
@@ -0,0 +1,97 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_pubsub_sink_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ topics: str,
+ value_converter: str,
+ key_converter: str,
+ cps_topic: str,
+ cps_project: str,
+ tasks_max: str,
+) -> None:
+ """
+ Create a Pub/Sub Sink connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: Name of the connector.
+ topics: Kafka topics to read from.
+ value_converter: Value converter class.
+ key_converter: Key converter class.
+ cps_topic: Cloud Pub/Sub topic ID.
+ cps_project: Cloud Pub/Sub project ID.
+ tasks_max: Maximum number of tasks.
+
+ Raises:
+        This method will raise the GoogleAPICallError exception if the operation
+        fails or the timeout is reached before the operation completes.
+ """
+ # TODO(developer): Update with your config values. Here is a sample configuration:
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "CPS_SINK_CONNECTOR_ID"
+ # topics = "GMK_TOPIC_ID"
+ # value_converter = "org.apache.kafka.connect.storage.StringConverter"
+ # key_converter = "org.apache.kafka.connect.storage.StringConverter"
+ # cps_topic = "CPS_TOPIC_ID"
+ # cps_project = "GCP_PROJECT_ID"
+ # tasks_max = "3"
+
+ # [START managedkafka_create_pubsub_sink_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector, CreateConnectorRequest
+
+ connect_client = ManagedKafkaConnectClient()
+ parent = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+
+ configs = {
+ "connector.class": "com.google.pubsub.kafka.sink.CloudPubSubSinkConnector",
+ "name": connector_id,
+ "tasks.max": tasks_max,
+ "topics": topics,
+ "value.converter": value_converter,
+ "key.converter": key_converter,
+ "cps.topic": cps_topic,
+ "cps.project": cps_project,
+ }
+
+ connector = Connector()
+ connector.name = connector_id
+ connector.configs = configs
+
+ request = CreateConnectorRequest(
+ parent=parent,
+ connector_id=connector_id,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.create_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Created Connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+ # [END managedkafka_create_pubsub_sink_connector]
diff --git a/managedkafka/snippets/connect/connectors/create_pubsub_source_connector.py b/managedkafka/snippets/connect/connectors/create_pubsub_source_connector.py
new file mode 100644
index 00000000000..19f891fd384
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/create_pubsub_source_connector.py
@@ -0,0 +1,97 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def create_pubsub_source_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ kafka_topic: str,
+ cps_subscription: str,
+ cps_project: str,
+ tasks_max: str,
+ value_converter: str,
+ key_converter: str,
+) -> None:
+ """
+ Create a Pub/Sub Source connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: Name of the connector.
+ kafka_topic: Kafka topic to publish to.
+ cps_subscription: Cloud Pub/Sub subscription ID.
+ cps_project: Cloud Pub/Sub project ID.
+ tasks_max: Maximum number of tasks.
+ value_converter: Value converter class.
+ key_converter: Key converter class.
+
+ Raises:
+        This method will raise the GoogleAPICallError exception if the operation
+        fails or the timeout is reached before the operation completes.
+ """
+ # TODO(developer): Update with your config values. Here is a sample configuration:
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "CPS_SOURCE_CONNECTOR_ID"
+ # kafka_topic = "GMK_TOPIC_ID"
+ # cps_subscription = "CPS_SUBSCRIPTION_ID"
+ # cps_project = "GCP_PROJECT_ID"
+ # tasks_max = "3"
+ # value_converter = "org.apache.kafka.connect.converters.ByteArrayConverter"
+ # key_converter = "org.apache.kafka.connect.storage.StringConverter"
+
+ # [START managedkafka_create_pubsub_source_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector, CreateConnectorRequest
+
+ connect_client = ManagedKafkaConnectClient()
+ parent = connect_client.connect_cluster_path(project_id, region, connect_cluster_id)
+
+ configs = {
+ "connector.class": "com.google.pubsub.kafka.source.CloudPubSubSourceConnector",
+ "name": connector_id,
+ "tasks.max": tasks_max,
+ "kafka.topic": kafka_topic,
+ "cps.subscription": cps_subscription,
+ "cps.project": cps_project,
+ "value.converter": value_converter,
+ "key.converter": key_converter,
+ }
+
+ connector = Connector()
+ connector.name = connector_id
+ connector.configs = configs
+
+ request = CreateConnectorRequest(
+ parent=parent,
+ connector_id=connector_id,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.create_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Created Connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+ # [END managedkafka_create_pubsub_source_connector]
diff --git a/managedkafka/snippets/connect/connectors/delete_connector.py b/managedkafka/snippets/connect/connectors/delete_connector.py
new file mode 100644
index 00000000000..84ee0e3ecff
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/delete_connector.py
@@ -0,0 +1,61 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def delete_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Delete a connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_delete_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.DeleteConnectorRequest(
+ name=connect_client.connector_path(project_id, region, connect_cluster_id, connector_id),
+ )
+
+ try:
+ operation = connect_client.delete_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print("Deleted connector")
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+
+ # [END managedkafka_delete_connector]
diff --git a/managedkafka/snippets/connect/connectors/get_connector.py b/managedkafka/snippets/connect/connectors/get_connector.py
new file mode 100644
index 00000000000..a3477ef4c70
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/get_connector.py
@@ -0,0 +1,60 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def get_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Get details of a specific connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the NotFound exception if the connector is not found.
+ """
+ # [START managedkafka_get_connector]
+ from google.api_core.exceptions import NotFound
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import ManagedKafkaConnectClient
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ connector_path = connect_client.connector_path(
+ project_id, region, connect_cluster_id, connector_id
+ )
+ request = managedkafka_v1.GetConnectorRequest(
+ name=connector_path,
+ )
+
+ try:
+ connector = connect_client.get_connector(request=request)
+ print("Got connector:", connector)
+ except NotFound as e:
+ print(f"Failed to get connector {connector_id} with error: {e}")
+
+ # [END managedkafka_get_connector]
diff --git a/managedkafka/snippets/connect/connectors/list_connectors.py b/managedkafka/snippets/connect/connectors/list_connectors.py
new file mode 100644
index 00000000000..f707df09454
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/list_connectors.py
@@ -0,0 +1,54 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def list_connectors(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+) -> None:
+ """
+ List all connectors in a Kafka Connect cluster.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ """
+ # [START managedkafka_list_connectors]
+ from google.cloud import managedkafka_v1
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.api_core.exceptions import GoogleAPICallError
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.ListConnectorsRequest(
+ parent=connect_client.connect_cluster_path(project_id, region, connect_cluster_id),
+ )
+
+ try:
+ response = connect_client.list_connectors(request=request)
+ for connector in response:
+ print("Got connector:", connector)
+ except GoogleAPICallError as e:
+ print(f"Failed to list connectors with error: {e}")
+
+ # [END managedkafka_list_connectors]
diff --git a/managedkafka/snippets/connect/connectors/pause_connector.py b/managedkafka/snippets/connect/connectors/pause_connector.py
new file mode 100644
index 00000000000..35f184c2443
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/pause_connector.py
@@ -0,0 +1,61 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def pause_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Pause a connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_pause_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.PauseConnectorRequest(
+ name=connect_client.connector_path(project_id, region, connect_cluster_id, connector_id),
+ )
+
+ try:
+ operation = connect_client.pause_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print(f"Paused connector {connector_id}")
+ except GoogleAPICallError as e:
+ print(f"Failed to pause connector {connector_id} with error: {e}")
+
+ # [END managedkafka_pause_connector]
diff --git a/managedkafka/snippets/connect/connectors/restart_connector.py b/managedkafka/snippets/connect/connectors/restart_connector.py
new file mode 100644
index 00000000000..72714de7aa1
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/restart_connector.py
@@ -0,0 +1,63 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def restart_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Restart a connector.
+ Note: This operation is used to restart a failed connector. To start
+ a stopped connector, use the `resume_connector` operation instead.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_restart_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.RestartConnectorRequest(
+ name=connect_client.connector_path(project_id, region, connect_cluster_id, connector_id),
+ )
+
+ try:
+ operation = connect_client.restart_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print(f"Restarted connector {connector_id}")
+ except GoogleAPICallError as e:
+ print(f"Failed to restart connector {connector_id} with error: {e}")
+
+ # [END managedkafka_restart_connector]
diff --git a/managedkafka/snippets/connect/connectors/resume_connector.py b/managedkafka/snippets/connect/connectors/resume_connector.py
new file mode 100644
index 00000000000..3787368ef1e
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/resume_connector.py
@@ -0,0 +1,61 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def resume_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Resume a paused connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_resume_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.ResumeConnectorRequest(
+ name=connect_client.connector_path(project_id, region, connect_cluster_id, connector_id),
+ )
+
+ try:
+ operation = connect_client.resume_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print(f"Resumed connector {connector_id}")
+ except GoogleAPICallError as e:
+ print(f"Failed to resume connector {connector_id} with error: {e}")
+
+ # [END managedkafka_resume_connector]
diff --git a/managedkafka/snippets/connect/connectors/stop_connector.py b/managedkafka/snippets/connect/connectors/stop_connector.py
new file mode 100644
index 00000000000..cd3767075bc
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/stop_connector.py
@@ -0,0 +1,61 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def stop_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+) -> None:
+ """
+ Stop a connector.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_stop_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud import managedkafka_v1
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+
+ connect_client = ManagedKafkaConnectClient()
+
+ request = managedkafka_v1.StopConnectorRequest(
+ name=connect_client.connector_path(project_id, region, connect_cluster_id, connector_id),
+ )
+
+ try:
+ operation = connect_client.stop_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ operation.result()
+ print(f"Stopped connector {connector_id}")
+ except GoogleAPICallError as e:
+ print(f"Failed to stop connector {connector_id} with error: {e}")
+
+ # [END managedkafka_stop_connector]
diff --git a/managedkafka/snippets/connect/connectors/update_connector.py b/managedkafka/snippets/connect/connectors/update_connector.py
new file mode 100644
index 00000000000..b0357079cd9
--- /dev/null
+++ b/managedkafka/snippets/connect/connectors/update_connector.py
@@ -0,0 +1,79 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def update_connector(
+ project_id: str,
+ region: str,
+ connect_cluster_id: str,
+ connector_id: str,
+ configs: dict,
+) -> None:
+ """
+ Update a connector's configuration.
+
+ Args:
+ project_id: Google Cloud project ID.
+ region: Cloud region.
+ connect_cluster_id: ID of the Kafka Connect cluster.
+ connector_id: ID of the connector.
+ configs: Dictionary containing the updated configuration.
+
+ Raises:
+ This method will raise the GoogleAPICallError exception if the operation errors.
+ """
+ # [START managedkafka_update_connector]
+ from google.api_core.exceptions import GoogleAPICallError
+ from google.cloud import managedkafka_v1
+ from google.cloud.managedkafka_v1.services.managed_kafka_connect import (
+ ManagedKafkaConnectClient,
+ )
+ from google.cloud.managedkafka_v1.types import Connector
+ from google.protobuf import field_mask_pb2
+
+ # TODO(developer)
+ # project_id = "my-project-id"
+ # region = "us-central1"
+ # connect_cluster_id = "my-connect-cluster"
+ # connector_id = "my-connector"
+ # configs = {
+ # "tasks.max": "6",
+ # "value.converter.schemas.enable": "true"
+ # }
+
+ connect_client = ManagedKafkaConnectClient()
+
+ connector = Connector()
+ connector.name = connect_client.connector_path(
+ project_id, region, connect_cluster_id, connector_id
+ )
+ connector.configs = configs
+ update_mask = field_mask_pb2.FieldMask()
+    update_mask.paths.append("configs")
+
+ # For a list of editable fields, one can check https://cloud.google.com/managed-service-for-apache-kafka/docs/connect-cluster/update-connector#editable-properties.
+ request = managedkafka_v1.UpdateConnectorRequest(
+ update_mask=update_mask,
+ connector=connector,
+ )
+
+ try:
+ operation = connect_client.update_connector(request=request)
+ print(f"Waiting for operation {operation.operation.name} to complete...")
+ response = operation.result()
+ print("Updated connector:", response)
+ except GoogleAPICallError as e:
+ print(f"The operation failed with error: {e}")
+
+ # [END managedkafka_update_connector]
diff --git a/managedkafka/snippets/requirements.txt b/managedkafka/snippets/requirements.txt
index a7da4ff6516..5f372e81c41 100644
--- a/managedkafka/snippets/requirements.txt
+++ b/managedkafka/snippets/requirements.txt
@@ -2,5 +2,5 @@ protobuf==5.29.4
pytest==8.2.2
google-api-core==2.23.0
google-auth==2.38.0
-google-cloud-managedkafka==0.1.5
+google-cloud-managedkafka==0.1.12
googleapis-common-protos==1.66.0
diff --git a/media-translation/snippets/requirements.txt b/media-translation/snippets/requirements.txt
index 5fa8162b556..622d9aa3082 100644
--- a/media-translation/snippets/requirements.txt
+++ b/media-translation/snippets/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-media-translation==0.11.16
+google-cloud-media-translation==0.11.17
pyaudio==0.2.14
six==1.16.0
diff --git a/model_armor/snippets/requirements.txt b/model_armor/snippets/requirements.txt
index a2d49b77ba7..0b64c19841b 100644
--- a/model_armor/snippets/requirements.txt
+++ b/model_armor/snippets/requirements.txt
@@ -1,2 +1,2 @@
-google-cloud-modelarmor==0.2.5
+google-cloud-modelarmor==0.2.8
google-cloud-dlp==3.30.0
\ No newline at end of file
diff --git a/model_armor/snippets/snippets_test.py b/model_armor/snippets/snippets_test.py
index bbb55bcb47c..e4f1935d035 100644
--- a/model_armor/snippets/snippets_test.py
+++ b/model_armor/snippets/snippets_test.py
@@ -35,9 +35,10 @@
create_model_armor_template_with_metadata,
)
from delete_template import delete_model_armor_template
-# from get_folder_floor_settings import get_folder_floor_settings
-# from get_organization_floor_settings import get_organization_floor_settings
-# from get_project_floor_settings import get_project_floor_settings
+
+from get_folder_floor_settings import get_folder_floor_settings
+from get_organization_floor_settings import get_organization_floor_settings
+from get_project_floor_settings import get_project_floor_settings
from get_template import get_model_armor_template
from list_templates import list_model_armor_templates
from list_templates_with_filter import list_model_armor_templates_with_filter
@@ -48,11 +49,12 @@
)
from sanitize_user_prompt import sanitize_user_prompt
from screen_pdf_file import screen_pdf_file
-# from update_folder_floor_settings import update_folder_floor_settings
-# from update_organizations_floor_settings import (
-# update_organization_floor_settings,
-# )
-# from update_project_floor_settings import update_project_floor_settings
+
+from update_folder_floor_settings import update_folder_floor_settings
+from update_organizations_floor_settings import (
+ update_organization_floor_settings,
+)
+from update_project_floor_settings import update_project_floor_settings
from update_template import update_model_armor_template
from update_template_labels import update_model_armor_template_labels
from update_template_metadata import update_model_armor_template_metadata
@@ -1166,50 +1168,48 @@ def test_quickstart(
quickstart(project_id, location_id, template_id)
-# TODO: Uncomment below tests once floor setting API issues are resolved.
-
-# def test_update_organization_floor_settings(
-# floor_setting_organization_id: str,
-# ) -> None:
-# response = update_organization_floor_settings(floor_setting_organization_id)
+def test_update_organization_floor_settings(
+ floor_setting_organization_id: str,
+) -> None:
+ response = update_organization_floor_settings(floor_setting_organization_id)
-# assert response.enable_floor_setting_enforcement
+ assert response.enable_floor_setting_enforcement
-# def test_update_folder_floor_settings(floor_setting_folder_id: str) -> None:
-# response = update_folder_floor_settings(floor_setting_folder_id)
+def test_update_folder_floor_settings(floor_setting_folder_id: str) -> None:
+ response = update_folder_floor_settings(floor_setting_folder_id)
-# assert response.enable_floor_setting_enforcement
+ assert response.enable_floor_setting_enforcement
-# def test_update_project_floor_settings(floor_settings_project_id: str) -> None:
-# response = update_project_floor_settings(floor_settings_project_id)
+def test_update_project_floor_settings(floor_settings_project_id: str) -> None:
+ response = update_project_floor_settings(floor_settings_project_id)
-# assert response.enable_floor_setting_enforcement
+ assert response.enable_floor_setting_enforcement
-# def test_get_organization_floor_settings(organization_id: str) -> None:
-# expected_floor_settings_name = (
-# f"organizations/{organization_id}/locations/global/floorSetting"
-# )
-# response = get_organization_floor_settings(organization_id)
+def test_get_organization_floor_settings(organization_id: str) -> None:
+ expected_floor_settings_name = (
+ f"organizations/{organization_id}/locations/global/floorSetting"
+ )
+ response = get_organization_floor_settings(organization_id)
-# assert response.name == expected_floor_settings_name
+ assert response.name == expected_floor_settings_name
-# def test_get_folder_floor_settings(folder_id: str) -> None:
-# expected_floor_settings_name = (
-# f"folders/{folder_id}/locations/global/floorSetting"
-# )
-# response = get_folder_floor_settings(folder_id)
+def test_get_folder_floor_settings(folder_id: str) -> None:
+ expected_floor_settings_name = (
+ f"folders/{folder_id}/locations/global/floorSetting"
+ )
+ response = get_folder_floor_settings(folder_id)
-# assert response.name == expected_floor_settings_name
+ assert response.name == expected_floor_settings_name
-# def test_get_project_floor_settings(project_id: str) -> None:
-# expected_floor_settings_name = (
-# f"projects/{project_id}/locations/global/floorSetting"
-# )
-# response = get_project_floor_settings(project_id)
+def test_get_project_floor_settings(project_id: str) -> None:
+ expected_floor_settings_name = (
+ f"projects/{project_id}/locations/global/floorSetting"
+ )
+ response = get_project_floor_settings(project_id)
-# assert response.name == expected_floor_settings_name
+ assert response.name == expected_floor_settings_name
diff --git a/model_garden/anthropic/anthropic_batchpredict_with_bq.py b/model_garden/anthropic/anthropic_batchpredict_with_bq.py
index 1823eb8c266..1e9ecdf0940 100644
--- a/model_garden/anthropic/anthropic_batchpredict_with_bq.py
+++ b/model_garden/anthropic/anthropic_batchpredict_with_bq.py
@@ -26,7 +26,7 @@ def generate_content(output_uri: str) -> str:
# output_uri = f"bq://your-project.your_dataset.your_table"
job = client.batches.create(
- # Check Anthropic Claude region availability in https://cloud.devsite.corp.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude#regions
+ # Check Anthropic Claude region availability in https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude#regions
# More about Anthropic model: https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-haiku
model="publishers/anthropic/models/claude-3-5-haiku",
# The source dataset needs to be created specifically in us-east5
diff --git a/model_garden/gemma/gemma3_deploy.py b/model_garden/gemma/gemma3_deploy.py
index 3c739ebf02f..ddf705a1a3c 100644
--- a/model_garden/gemma/gemma3_deploy.py
+++ b/model_garden/gemma/gemma3_deploy.py
@@ -26,7 +26,7 @@ def deploy() -> aiplatform.Endpoint:
# [START aiplatform_modelgarden_gemma3_deploy]
import vertexai
- from vertexai.preview import model_garden
+ from vertexai import model_garden
# TODO(developer): Update and un-comment below lines
# PROJECT_ID = "your-project-id"
diff --git a/model_garden/gemma/models_deploy_options_list.py b/model_garden/gemma/models_deploy_options_list.py
index 67457315d1b..4edfd2fd8b5 100644
--- a/model_garden/gemma/models_deploy_options_list.py
+++ b/model_garden/gemma/models_deploy_options_list.py
@@ -28,7 +28,7 @@ def list_deploy_options(model : str) -> List[types.PublisherModel.CallToAction.D
# [START aiplatform_modelgarden_models_deployables_options_list]
import vertexai
- from vertexai.preview import model_garden
+ from vertexai import model_garden
# TODO(developer): Update and un-comment below lines
# PROJECT_ID = "your-project-id"
diff --git a/model_garden/gemma/models_deployable_list.py b/model_garden/gemma/models_deployable_list.py
index 689d707a6f4..7cf49e1e381 100644
--- a/model_garden/gemma/models_deployable_list.py
+++ b/model_garden/gemma/models_deployable_list.py
@@ -26,7 +26,7 @@ def list_deployable_models() -> List[str]:
# [START aiplatform_modelgarden_models_deployables_list]
import vertexai
- from vertexai.preview import model_garden
+ from vertexai import model_garden
# TODO(developer): Update and un-comment below lines
# PROJECT_ID = "your-project-id"
diff --git a/model_garden/gemma/requirements.txt b/model_garden/gemma/requirements.txt
index 2ee56ff693b..eba13fe9012 100644
--- a/model_garden/gemma/requirements.txt
+++ b/model_garden/gemma/requirements.txt
@@ -1 +1 @@
-google-cloud-aiplatform[all]==1.84.0
+google-cloud-aiplatform[all]==1.103.0
diff --git a/model_garden/gemma/test_model_garden_examples.py b/model_garden/gemma/test_model_garden_examples.py
index 6dda9bae3c0..4205ae39c08 100644
--- a/model_garden/gemma/test_model_garden_examples.py
+++ b/model_garden/gemma/test_model_garden_examples.py
@@ -34,7 +34,7 @@ def test_list_deploy_options() -> None:
assert len(deploy_options) > 0
-@patch("vertexai.preview.model_garden.OpenModel")
+@patch("vertexai.model_garden.OpenModel")
def test_gemma3_deploy(mock_open_model: MagicMock) -> None:
# Mock the deploy response.
mock_endpoint = aiplatform.Endpoint(endpoint_name="test-endpoint-name")
diff --git a/noxfile-template.py b/noxfile-template.py
index 2763a10bad3..09bd81c1b77 100644
--- a/noxfile-template.py
+++ b/noxfile-template.py
@@ -88,7 +88,7 @@ def get_pytest_env_vars() -> dict[str, str]:
# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
+ALL_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
@@ -97,6 +97,11 @@ def get_pytest_env_vars() -> dict[str, str]:
INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+# Lint with the first entry of TESTED_VERSIONS (defaults to "3.10" when none are configured)
+LINTING_VERSION = "3.10"
+if len(TESTED_VERSIONS) > 0:
+ LINTING_VERSION = TESTED_VERSIONS[0]
+
# Error if a python version is missing
nox.options.error_on_missing_interpreters = True
@@ -146,7 +151,7 @@ def _determine_local_import_names(start_dir: str) -> list[str]:
]
-@nox.session
+@nox.session(python=LINTING_VERSION)
def lint(session: nox.sessions.Session) -> None:
if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
@@ -167,7 +172,7 @@ def lint(session: nox.sessions.Session) -> None:
#
-@nox.session
+@nox.session(python=LINTING_VERSION)
def blacken(session: nox.sessions.Session) -> None:
session.install("black")
python_files = [path for path in os.listdir(".") if path.endswith(".py")]
diff --git a/parametermanager/snippets/create_param_version_with_secret.py b/parametermanager/snippets/create_param_version_with_secret.py
index 58190441b0c..b986a76f066 100644
--- a/parametermanager/snippets/create_param_version_with_secret.py
+++ b/parametermanager/snippets/create_param_version_with_secret.py
@@ -46,7 +46,7 @@ def create_param_version_with_secret(
"my-project",
"my-global-parameter",
"v1",
- "projects/my-project/secrets/application-secret/version/latest"
+ "projects/my-project/secrets/application-secret/versions/latest"
)
"""
# Import the necessary library for Google Cloud Parameter Manager.
diff --git a/parametermanager/snippets/regional_samples/create_regional_param_version_with_secret.py b/parametermanager/snippets/regional_samples/create_regional_param_version_with_secret.py
index 966b7e39345..2b350201241 100644
--- a/parametermanager/snippets/regional_samples/create_regional_param_version_with_secret.py
+++ b/parametermanager/snippets/regional_samples/create_regional_param_version_with_secret.py
@@ -52,8 +52,7 @@ def create_regional_param_version_with_secret(
"us-central1",
"my-regional-parameter",
"v1",
- "projects/my-project/locations/us-central1/
- secrets/application-secret/version/latest"
+ "projects/my-project/locations/us-central1/secrets/application-secret/versions/latest"
)
"""
# Import the necessary library for Google Cloud Parameter Manager.
diff --git a/parametermanager/snippets/regional_samples/remove_regional_param_kms_key.py b/parametermanager/snippets/regional_samples/remove_regional_param_kms_key.py
index 486a8e68204..7022e34820c 100644
--- a/parametermanager/snippets/regional_samples/remove_regional_param_kms_key.py
+++ b/parametermanager/snippets/regional_samples/remove_regional_param_kms_key.py
@@ -41,7 +41,7 @@ def remove_regional_param_kms_key(
remove_regional_param_kms_key(
"my-project",
"us-central1",
- "my-global-parameter"
+ "my-regional-parameter"
)
"""
# Import the necessary library for Google Cloud Parameter Manager.
diff --git a/parametermanager/snippets/regional_samples/update_regional_param_kms_key.py b/parametermanager/snippets/regional_samples/update_regional_param_kms_key.py
index 704614acf3d..bf2ec86107a 100644
--- a/parametermanager/snippets/regional_samples/update_regional_param_kms_key.py
+++ b/parametermanager/snippets/regional_samples/update_regional_param_kms_key.py
@@ -42,7 +42,7 @@ def update_regional_param_kms_key(
update_regional_param_kms_key(
"my-project",
"us-central1",
- "my-global-parameter",
+ "my-regional-parameter",
"projects/my-project/locations/us-central1/keyRings/test/cryptoKeys/updated-test-key"
)
"""
diff --git a/parametermanager/snippets/requirements.txt b/parametermanager/snippets/requirements.txt
index 012571b208f..0919a6ec653 100644
--- a/parametermanager/snippets/requirements.txt
+++ b/parametermanager/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-parametermanager==0.1.3
+google-cloud-parametermanager==0.1.5
diff --git a/people-and-planet-ai/conftest.py b/people-and-planet-ai/conftest.py
index fed54feb9b9..1bf49d26a00 100644
--- a/people-and-planet-ai/conftest.py
+++ b/people-and-planet-ai/conftest.py
@@ -84,7 +84,7 @@ def bucket_name(test_name: str, location: str, unique_id: str) -> Iterable[str]:
# Try to remove all files before deleting the bucket.
# Deleting a bucket with too many files results in an error.
try:
- run_cmd("gsutil", "-m", "rm", "-rf", f"gs://{bucket_name}/*")
+ run_cmd("gcloud", "storage", "rm", "--recursive", f"gs://{bucket_name}/**")
except RuntimeError:
# If no files were found and it fails, ignore the error.
pass
diff --git a/people-and-planet-ai/weather-forecasting/notebooks/3-training.ipynb b/people-and-planet-ai/weather-forecasting/notebooks/3-training.ipynb
index f0656c1208c..ab637613a91 100644
--- a/people-and-planet-ai/weather-forecasting/notebooks/3-training.ipynb
+++ b/people-and-planet-ai/weather-forecasting/notebooks/3-training.ipynb
@@ -1381,7 +1381,7 @@
" display_name=\"weather-forecasting\",\n",
" python_package_gcs_uri=f\"gs://{bucket}/weather/weather-model-1.0.0.tar.gz\",\n",
" python_module_name=\"weather.trainer\",\n",
- " container_uri=\"us-docker.pkg.dev/vertex-ai/training/pytorch-gpu.2-4.py310:latest\",\n",
+ " container_uri=\"us-docker.pkg.dev/vertex-ai/training/pytorch-gpu.2-8.py310:latest\",\n",
")\n",
"job.run(\n",
" machine_type=\"n1-highmem-8\",\n",
diff --git a/people-and-planet-ai/weather-forecasting/serving/weather-model/pyproject.toml b/people-and-planet-ai/weather-forecasting/serving/weather-model/pyproject.toml
index e5b3a98ffbe..43c03683ccd 100644
--- a/people-and-planet-ai/weather-forecasting/serving/weather-model/pyproject.toml
+++ b/people-and-planet-ai/weather-forecasting/serving/weather-model/pyproject.toml
@@ -17,9 +17,9 @@
name = "weather-model"
version = "1.0.0"
dependencies = [
- "datasets==3.0.1",
- "torch==2.4.0", # make sure this matches the `container_uri` in `notebooks/3-training.ipynb`
- "transformers==4.48.0",
+ "datasets==4.0.0",
+ "torch==2.8.0", # make sure this matches the `container_uri` in `notebooks/3-training.ipynb`
+ "transformers==5.0.0",
]
[project.scripts]
diff --git a/pubsublite/spark-connector/README.md b/pubsublite/spark-connector/README.md
index dc800440166..cdef86589f7 100644
--- a/pubsublite/spark-connector/README.md
+++ b/pubsublite/spark-connector/README.md
@@ -54,7 +54,7 @@ Get the connector's uber jar from this [public Cloud Storage location]. Alternat
```bash
export BUCKET_ID=your-gcs-bucket-id
- gsutil mb gs://$BUCKET_ID
+ gcloud storage buckets create gs://$BUCKET_ID
```
## Python setup
@@ -193,7 +193,7 @@ Here is an example output: