diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 301fb0204..1af9bb43e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -42,20 +42,20 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7 + uses: github/codeql-action/init@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7 with: languages: ${{ matrix.language }} # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). # If this step fails, then you should remove it and run the build manually - name: Autobuild - uses: github/codeql-action/autobuild@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7 + uses: github/codeql-action/autobuild@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7 + uses: github/codeql-action/analyze@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index df109d873..e809dbd81 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -24,34 +24,34 @@ jobs: runs-on: ubuntu-latest steps: - name: Setup Python - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: - python-version: "3.13" + python-version: "3.14" - run: pip install nox coverage - name: Checkout base branch - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: ref: ${{ github.base_ref }} 
- name: Calculate base code coverage run: | - nox --sessions unit-3.13 + nox --sessions unit-3.14 coverage report --show-missing export CUR_COVER=$(coverage report | awk '$1 == "TOTAL" {print $NF+0}') echo "CUR_COVER=$CUR_COVER" >> $GITHUB_ENV coverage erase - name: Checkout PR branch - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: ref: ${{ github.event.pull_request.head.sha }} repository: ${{ github.event.pull_request.head.repo.full_name }} - name: Calculate PR code coverage run: | - nox --sessions unit-3.13 + nox --sessions unit-3.14 coverage report --show-missing export PR_COVER=$(coverage report | awk '$1 == "TOTAL" {print $NF+0}') echo "PR_COVER=$PR_COVER" >> $GITHUB_ENV diff --git a/.github/workflows/labels.yaml b/.github/workflows/labels.yaml index 24eefa436..fb5656673 100644 --- a/.github/workflows/labels.yaml +++ b/.github/workflows/labels.yaml @@ -28,7 +28,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f82d503a0..b685b8731 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -25,15 +25,15 @@ jobs: runs-on: ubuntu-latest steps: - name: Setup Python - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: - python-version: "3.13" + python-version: "3.14" - name: Install nox run: pip install nox - name: Checkout code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # 
v6.0.1 - name: Run nox lint session run: nox --sessions lint diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 3e8c8687f..f28e246c2 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -35,12 +35,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 with: results_file: results.sarif results_format: sarif @@ -57,7 +57,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: SARIF file path: results.sarif @@ -65,6 +65,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7 + uses: github/codeql-action/upload-sarif@cf1bb45a277cb3c205638b2cd5c984db1c46a412 # v4.31.7 with: sarif_file: resultsFiltered.sarif diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b600742bf..feb74526e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,7 +35,7 @@ jobs: strategy: matrix: os: [macos-latest, windows-latest, ubuntu-latest] - python-version: ["3.9", "3.13"] + python-version: ["3.10", "3.14"] fail-fast: false permissions: contents: read @@ -44,10 +44,10 @@ jobs: pull-requests: write steps: - name: Checkout code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Setup Python ${{ matrix.python-version }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: ${{ matrix.python-version }} @@ -144,7 +144,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.13"] + python-version: ["3.10", "3.14"] fail-fast: false permissions: contents: read @@ -153,10 +153,10 @@ jobs: pull-requests: write steps: - name: Checkout code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Setup Python ${{ matrix.python-version }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 with: python-version: ${{ matrix.python-version }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 1347a4f8c..acb8ab3a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## 
[1.19.0](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/compare/v1.18.5...v1.19.0) (2025-12-09) + + +### Features + +* Add python 3.14 and remove python 3.9 support ([#1339](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/issues/1339)) ([a2e8c4d](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/commit/a2e8c4d30deeeb73475caf3367acadc58193228f)) + + +### Documentation + +* Add sample Cloud Run example application ([#1335](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/issues/1335)) ([adc25f6](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/commit/adc25f683059a3f0fe3865d648374dce087cdaaa)) +* Update documentation to describe how to use Advanced DR write endpoint. ([#1332](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/issues/1332)) ([2e1ed21](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/commit/2e1ed211cad6dd63a76fa8c80df6c1d09d112778)) + ## [1.18.5](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/compare/v1.18.4...v1.18.5) (2025-10-09) diff --git a/README.md b/README.md index 9bea3d6d6..d6921425f 100644 --- a/README.md +++ b/README.md @@ -348,14 +348,27 @@ conn = connector.connect( ) ``` -### Using DNS domain names to identify instances +### Using Advanced Disaster Recovery and DNS domain names to identify instances -The connector can be configured to use DNS to look up an instance. This would -allow you to configure your application to connect to a database instance, and -centrally configure which instance in your DNS zone. +The connector can be configured to use DNS to look up an instance. +Use a DNS name managed by Cloud SQL [Advanced Disaster Recovery](https://docs.cloud.google.com/sql/docs/mysql/use-advanced-disaster-recovery), +or a domain name that you manage. 
+ +#### Using Advanced Disaster Recovery Write Endpoint DNS Name + +[Advanced Disaster Recovery](https://docs.cloud.google.com/sql/docs/mysql/use-advanced-disaster-recovery) +creates geographically distributed replicas of your Cloud SQL database instance. When you perform +switchover or failover on the database instance, the connector will gracefully disconnect from the +old primary instance and reconnect to the new primary instance. + +Follow the instructions in [Connect using Write Endpoint](https://docs.cloud.google.com/sql/docs/mysql/connect-to-instance-using-write-endpoint) +to get the write endpoint DNS name for your primary instance. Then, use this write endpoint DNS +name to configure the connector. #### Configure your DNS Records +The connector may be configured to use DNS records that you define as well. + Add a DNS TXT record for the Cloud SQL instance to a **private** DNS server or a private Google Cloud DNS Zone used by your application. diff --git a/google/cloud/sql/connector/version.py b/google/cloud/sql/connector/version.py index c42d7c855..c30d04f46 100644 --- a/google/cloud/sql/connector/version.py +++ b/google/cloud/sql/connector/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License.
-__version__ = "1.18.5" +__version__ = "1.19.0" diff --git a/noxfile.py b/noxfile.py index b0220f474..34ba6d603 100644 --- a/noxfile.py +++ b/noxfile.py @@ -22,7 +22,7 @@ LINT_PATHS = ["google", "tests", "noxfile.py"] -TEST_PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] +TEST_PYTHON_VERSIONS = ["3.10", "3.11", "3.12", "3.13", "3.14"] @nox.session diff --git a/requirements-test.txt b/requirements-test.txt index 9cba5a851..453dcf01b 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,13 +1,13 @@ -pytest==8.4.1 +pytest==9.0.2 mock==5.2.0 pytest-cov==7.0.0 -pytest-asyncio==1.1.0 -SQLAlchemy[asyncio]==2.0.43 +pytest-asyncio==1.3.0 +SQLAlchemy[asyncio]==2.0.44 sqlalchemy-pytds==1.0.2 sqlalchemy-stubs==0.4 -PyMySQL==1.1.1 -pg8000==1.31.4 -asyncpg==0.30.0 -python-tds==1.16.1 +PyMySQL==1.1.2 +pg8000==1.31.5 +asyncpg==0.31.0 +python-tds==1.17.1 aioresponses==0.7.8 pytest-aiohttp==1.1.0 diff --git a/requirements.txt b/requirements.txt index 766d62dc5..fb0a7ccf8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ -aiofiles==24.1.0 -aiohttp==3.12.15 -cryptography==46.0.2 -dnspython==2.7.0 -Requests==2.32.4 -google-auth==2.40.3 +aiofiles==25.1.0 +aiohttp==3.13.2 +cryptography==46.0.3 +dnspython==2.8.0 +Requests==2.32.5 +google-auth==2.43.0 diff --git a/samples/cloudrun/README.md b/samples/cloudrun/README.md new file mode 100644 index 000000000..2c2b7eb72 --- /dev/null +++ b/samples/cloudrun/README.md @@ -0,0 +1,180 @@ +# Connecting Cloud Run to Cloud SQL with the Python Connector + +This guide provides a comprehensive walkthrough of how to connect a Cloud Run service to a Cloud SQL instance using the Cloud SQL Python Connector. It covers connecting to instances with both public and private IP addresses and demonstrates how to handle database credentials securely. + +## Develop a Python Application + +The following Python applications demonstrate how to connect to a Cloud SQL instance using the Cloud SQL Python Connector. 
+ +### `mysql/main.py` and `postgres/main.py` + +These files contain the core application logic for connecting to a Cloud SQL for MySQL or PostgreSQL instance. They provide two separate authentication methods, each exposed at a different route: +- `/`: Password-based authentication +- `/iam`: IAM-based authentication + + +### `sqlserver/main.py` + +This file contains the core application logic for connecting to a Cloud SQL for SQL Server instance. It uses the `cloud-sql-python-connector` to create a SQLAlchemy connection pool with password-based authentication at the `/` route. + +> [!NOTE] +> +> Cloud SQL for SQL Server does not support IAM database authentication. + + +> [!NOTE] +> **Lazy Refresh** +> +> The sample code in all three `main.py` files initializes the `Connector` with `refresh_strategy=lazy`. This is a recommended approach to avoid connection errors and optimize cost by preventing background processes from running when the CPU is throttled. + +## Global Variables and Lazy Instantiation + +In a Cloud Run service, global variables are initialized when the container instance starts up. The application instance then handles subsequent requests until the container is spun down. + +The `Connector` and SQLAlchemy `Engine` objects are defined as global variables (initially set to `None`) and are lazily instantiated (created only when needed) inside the request handlers. + +This approach offers several benefits: + +1. **Faster Startup:** By deferring initialization until the first request, the Cloud Run service can start listening for requests almost immediately, reducing cold start latency. +2. **Resource Efficiency:** Expensive operations, like establishing background connections or fetching secrets, are only performed when actually required. +3. **Connection Reuse:** Once initialized, the global `Connector` and `Engine` instances are reused for all subsequent requests to that container instance. 
This prevents the overhead of creating new connections for every request and avoids hitting connection limits. + +## IAM Authentication Prerequisites + + +For IAM authentication to work, you must ensure two things: + +1. **The Cloud Run service's service account has the `Cloud SQL Client` role.** You can grant this role with the following command: + ```bash + gcloud projects add-iam-policy-binding PROJECT_ID \ + --member="serviceAccount:SERVICE_ACCOUNT_EMAIL" \ + --role="roles/cloudsql.client" + ``` + Replace `PROJECT_ID` with your Google Cloud project ID and `SERVICE_ACCOUNT_EMAIL` with the email of the service account your Cloud Run service is using. + +2. **The service account is added as a database user to your Cloud SQL instance.** You can do this with the following command: + ```bash + gcloud sql users create SERVICE_ACCOUNT_EMAIL \ + --instance=INSTANCE_NAME \ + --type=cloud_iam_user + ``` + Replace `SERVICE_ACCOUNT_EMAIL` with the same service account email and `INSTANCE_NAME` with your Cloud SQL instance name. + +For Password-based authentication to work: + +1. **The Cloud Run service's service account has the `Secret Accessor` role.** You can grant this role with the following command: + ```bash + gcloud projects add-iam-policy-binding PROJECT_ID \ + --member="serviceAccount:SERVICE_ACCOUNT_EMAIL" \ + --role="roles/secretmanager.secretAccessor" + ``` + Replace `PROJECT_ID` with your Google Cloud project ID and `SERVICE_ACCOUNT_EMAIL` with the email of the service account your Cloud Run service is using. + +## Deploy the Application to Cloud Run + +Follow these steps to deploy the application to Cloud Run. + +### Build and Push the Docker Image + +1. **Enable the Artifact Registry API:** + + ```bash + gcloud services enable artifactregistry.googleapis.com + ``` + +2. **Create an Artifact Registry repository:** + + ```bash + gcloud artifacts repositories create REPO_NAME \ + --repository-format=docker \ + --location=REGION + ``` + +3. 
**Configure Docker to authenticate with Artifact Registry:** + + ```bash + gcloud auth configure-docker REGION-docker.pkg.dev + ``` + +4. **Build the Docker image (replace `mysql` with `postgres` or `sqlserver` as needed):** + + ```bash + docker build -t REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME mysql + ``` + +5. **Push the Docker image to Artifact Registry:** + + ```bash + docker push REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME + ``` + +### Deploy to Cloud Run + +Deploy the container image to Cloud Run using the `gcloud run deploy` command. + + +**Sample Values:** +* `SERVICE_NAME`: `my-cloud-run-service` +* `REGION`: `us-central1` +* `PROJECT_ID`: `my-gcp-project-id` +* `REPO_NAME`: `my-artifact-repo` +* `IMAGE_NAME`: `my-app-image` +* `INSTANCE_CONNECTION_NAME`: `my-gcp-project-id:us-central1:my-instance-name` +* `DB_USER`: `my-db-user` (for password-based authentication) +* `DB_IAM_USER`: `my-service-account@my-gcp-project-id.iam.gserviceaccount.com` (for IAM-based authentication) +* `DB_NAME`: `my-db-name` +* `DB_PASSWORD`: `my-user-pass-secret-name` +* `VPC_NETWORK`: `my-vpc-network` +* `SUBNET_NAME`: `my-vpc-subnet` + + +**For MySQL and PostgreSQL (Public IP):** + +```bash +gcloud run deploy SERVICE_NAME \ + --image=REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME \ + --set-env-vars=DB_USER=DB_USER,DB_IAM_USER=DB_IAM_USER,DB_NAME=DB_NAME,DB_SECRET_NAME=DB_SECRET_NAME,INSTANCE_CONNECTION_NAME=INSTANCE_CONNECTION_NAME \ + --region=REGION \ + --update-secrets=DB_PASSWORD=DB_PASSWORD:latest +``` + +**For MySQL and PostgreSQL (Private IP):** + +```bash +gcloud run deploy SERVICE_NAME \ + --image=REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME \ + --set-env-vars=DB_USER=DB_USER,DB_IAM_USER=DB_IAM_USER,DB_NAME=DB_NAME,DB_SECRET_NAME=DB_SECRET_NAME,INSTANCE_CONNECTION_NAME=INSTANCE_CONNECTION_NAME,IP_TYPE=PRIVATE \ + --network=VPC_NETWORK \ + --subnet=SUBNET_NAME \ + --vpc-egress=private-ranges-only \ + --region=REGION \ + 
--update-secrets=DB_PASSWORD=DB_PASSWORD:latest +``` + +**For SQL Server (Public IP):** + +```bash +gcloud run deploy SERVICE_NAME \ + --image=REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME \ + --set-env-vars=DB_USER=DB_USER,DB_NAME=DB_NAME,DB_SECRET_NAME=DB_SECRET_NAME,INSTANCE_CONNECTION_NAME=INSTANCE_CONNECTION_NAME \ + --region=REGION \ + --update-secrets=DB_PASSWORD=DB_PASSWORD:latest +``` + +**For SQL Server (Private IP):** + +```bash +gcloud run deploy SERVICE_NAME \ + --image=REGION-docker.pkg.dev/PROJECT_ID/REPO_NAME/IMAGE_NAME \ + --set-env-vars=DB_USER=DB_USER,DB_NAME=DB_NAME,DB_SECRET_NAME=DB_SECRET_NAME,INSTANCE_CONNECTION_NAME=INSTANCE_CONNECTION_NAME,IP_TYPE=PRIVATE \ + --network=VPC_NETWORK \ + --subnet=SUBNET_NAME \ + --vpc-egress=private-ranges-only \ + --region=REGION \ + --update-secrets=DB_PASSWORD=DB_PASSWORD:latest +``` + +> [!NOTE] +> **`For PSC connections`** +> +> To connect to the Cloud SQL instance with PSC connection type, create a PSC endpoint, a DNS zone and DNS record for the instance in the same VPC network as the Cloud Run service and replace the `IP_TYPE` in the deploy command with `PSC`. To configure DNS records, refer to [Connect to an instance using Private Service Connect](https://docs.cloud.google.com/sql/docs/mysql/configure-private-service-connect) guide \ No newline at end of file diff --git a/samples/cloudrun/mysql/Dockerfile b/samples/cloudrun/mysql/Dockerfile new file mode 100644 index 000000000..485bced95 --- /dev/null +++ b/samples/cloudrun/mysql/Dockerfile @@ -0,0 +1,18 @@ +# Use the official lightweight Python image. +# https://hub.docker.com/_/python +FROM python:3.14-slim + +# Allow statements and log messages to immediately appear in the Knative logs +ENV PYTHONUNBUFFERED True + +# Copy local code to the container image. +ENV APP_HOME /app +WORKDIR $APP_HOME +COPY . . + +# Install production dependencies. +RUN pip install --no-cache-dir -r requirements.txt + +# Run the web service on container startup.
+# Use gunicorn for production deployments. +CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 --timeout 0 main:app diff --git a/samples/cloudrun/mysql/main.py b/samples/cloudrun/mysql/main.py new file mode 100644 index 000000000..b1b546682 --- /dev/null +++ b/samples/cloudrun/mysql/main.py @@ -0,0 +1,142 @@ +""" +Copyright 2025 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os +import sqlalchemy +from flask import Flask +from google.cloud.sql.connector import Connector, IPTypes + +# Initialize Flask app +app = Flask(__name__) + +# Connector and SQLAlchemy engines are initialized as None to allow for lazy instantiation. +# +# The Connector object is a global variable to ensure that the same connector +# instance is used across all requests. This prevents the unnecessary creation +# of new Connector instances, which is inefficient and can lead to connection +# limits being reached. +# +# Lazy instantiation (initializing the Connector and Engine only when needed) +# allows the Cloud Run service to start up faster, as it avoids performing +# initialization tasks (like fetching secrets or metadata) during startup. 
+connector = None +iam_engine = None +password_engine = None + + +# Function to create a database connection using IAM authentication +def get_iam_connection() -> sqlalchemy.engine.base.Connection: + """Creates a database connection using IAM authentication.""" + instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"] + db_user = os.environ["DB_IAM_USER"] # IAM service account email + db_name = os.environ["DB_NAME"] + ip_type_str = os.environ.get("IP_TYPE", "PUBLIC") + ip_type = IPTypes[ip_type_str] + + conn = connector.connect( + instance_connection_name, + "pymysql", + user=db_user, + db=db_name, + ip_type=ip_type, + enable_iam_auth=True, + ) + return conn + + +# Function to create a database connection using password-based authentication +def get_password_connection() -> sqlalchemy.engine.base.Connection: + """Creates a database connection using password authentication.""" + instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"] + db_user = os.environ["DB_USER"] # Database username + db_name = os.environ["DB_NAME"] + db_password = os.environ["DB_PASSWORD"] + ip_type_str = os.environ.get("IP_TYPE", "PUBLIC") + ip_type = IPTypes[ip_type_str] + + conn = connector.connect( + instance_connection_name, + "pymysql", + user=db_user, + password=db_password, + db=db_name, + ip_type=ip_type, + ) + return conn + + +# This example uses two distinct SQLAlchemy engines to demonstrate two different +# authentication methods (IAM and password-based) in the same application. +# +# In a typical production application, you would generally only need one +# SQLAlchemy engine, configured for your preferred authentication method. +# Both engines are defined globally to allow for connection pooling and +# reuse across requests. 
+ + +def connect_with_password() -> sqlalchemy.engine.base.Connection: + """Initializes the connector and password engine if necessary, then returns a connection.""" + global connector, password_engine + + if connector is None: + connector = Connector(refresh_strategy="lazy") + + if password_engine is None: + password_engine = sqlalchemy.create_engine( + "mysql+pymysql://", + creator=get_password_connection, + ) + + return password_engine.connect() + + +def connect_with_iam() -> sqlalchemy.engine.base.Connection: + """Initializes the connector and IAM engine if necessary, then returns a connection.""" + global connector, iam_engine + + if connector is None: + connector = Connector(refresh_strategy="lazy") + + if iam_engine is None: + iam_engine = sqlalchemy.create_engine( + "mysql+pymysql://", + creator=get_iam_connection, + ) + + return iam_engine.connect() + + +@app.route("/") +def password_auth_index(): + try: + with connect_with_password() as conn: + result = conn.execute(sqlalchemy.text("SELECT 1")).fetchall() + return f"Database connection successful (password authentication), result: {result}" + except Exception as e: + return f"Error connecting to the database (password authentication)", 500 + + +@app.route("/iam") +def iam_auth_index(): + try: + with connect_with_iam() as conn: + result = conn.execute(sqlalchemy.text("SELECT 1")).fetchall() + return f"Database connection successful (IAM authentication), result: {result}" + except Exception as e: + return f"Error connecting to the database (IAM authentication)", 500 + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8080))) diff --git a/samples/cloudrun/mysql/requirements.txt b/samples/cloudrun/mysql/requirements.txt new file mode 100644 index 000000000..c21d2ff2e --- /dev/null +++ b/samples/cloudrun/mysql/requirements.txt @@ -0,0 +1,5 @@ +cloud-sql-python-connector[pymysql] +sqlalchemy +Flask +gunicorn +google-cloud-secret-manager diff --git 
a/samples/cloudrun/postgres/Dockerfile b/samples/cloudrun/postgres/Dockerfile new file mode 100644 index 000000000..485bced95 --- /dev/null +++ b/samples/cloudrun/postgres/Dockerfile @@ -0,0 +1,18 @@ +# Use the official lightweight Python image. +# https://hub.docker.com/_/python +FROM python:3.14-slim + +# Allow statements and log messages to immediately appear in the Knative logs +ENV PYTHONUNBUFFERED True + +# Copy local code to the container image. +ENV APP_HOME /app +WORKDIR $APP_HOME +COPY . . + +# Install production dependencies. +RUN pip install --no-cache-dir -r requirements.txt + +# Run the web service on container startup. +# Use gunicorn for production deployments. +CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 --timeout 0 main:app diff --git a/samples/cloudrun/postgres/main.py b/samples/cloudrun/postgres/main.py new file mode 100644 index 000000000..e33d5a06c --- /dev/null +++ b/samples/cloudrun/postgres/main.py @@ -0,0 +1,143 @@ +""" +Copyright 2025 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os +import sqlalchemy +from flask import Flask +from google.cloud.sql.connector import Connector, IPTypes + +# Initialize Flask app +app = Flask(__name__) + +# Connector and SQLAlchemy engines are initialized as None to allow for lazy instantiation. +# +# The Connector object is a global variable to ensure that the same connector +# instance is used across all requests. 
This prevents the unnecessary creation +# of new Connector instances, which is inefficient and can lead to connection +# limits being reached. +# +# Lazy instantiation (initializing the Connector and Engine only when needed) +# allows the Cloud Run service to start up faster, as it avoids performing +# initialization tasks (like fetching secrets or metadata) during startup. +connector = None +iam_engine = None +password_engine = None + + +# Function to create a database connection using IAM authentication +def get_iam_connection() -> sqlalchemy.engine.base.Connection: + """Creates a database connection using IAM authentication.""" + instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"] + db_user = os.environ["DB_IAM_USER"] # IAM service account email + db_name = os.environ["DB_NAME"] + ip_type_str = os.environ.get("IP_TYPE", "PUBLIC") + ip_type = IPTypes[ip_type_str] + + conn = connector.connect( + instance_connection_name, + "pg8000", + user=db_user, + db=db_name, + ip_type=ip_type, + enable_iam_auth=True, + ) + return conn + + +# Function to create a database connection using password-based authentication +def get_password_connection() -> sqlalchemy.engine.base.Connection: + """Creates a database connection using password authentication.""" + instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"] + db_user = os.environ["DB_USER"] # Database username + db_name = os.environ["DB_NAME"] + db_password = os.environ["DB_PASSWORD"] + ip_type_str = os.environ.get("IP_TYPE", "PUBLIC") + ip_type = IPTypes[ip_type_str] + + + conn = connector.connect( + instance_connection_name, + "pg8000", + user=db_user, + password=db_password, + db=db_name, + ip_type=ip_type, + ) + return conn + + +# This example uses two distinct SQLAlchemy engines to demonstrate two different +# authentication methods (IAM and password-based) in the same application. 
+# +# In a typical production application, you would generally only need one +# SQLAlchemy engine, configured for your preferred authentication method. +# Both engines are defined globally to allow for connection pooling and +# reuse across requests. + + +def connect_with_password() -> sqlalchemy.engine.base.Connection: + """Initializes the connector and password engine if necessary, then returns a connection.""" + global connector, password_engine + + if connector is None: + connector = Connector(refresh_strategy="lazy") + + if password_engine is None: + password_engine = sqlalchemy.create_engine( + "postgresql+pg8000://", + creator=get_password_connection, + ) + + return password_engine.connect() + + +def connect_with_iam() -> sqlalchemy.engine.base.Connection: + """Initializes the connector and IAM engine if necessary, then returns a connection.""" + global connector, iam_engine + + if connector is None: + connector = Connector(refresh_strategy="lazy") + + if iam_engine is None: + iam_engine = sqlalchemy.create_engine( + "postgresql+pg8000://", + creator=get_iam_connection, + ) + + return iam_engine.connect() + + +@app.route("/") +def password_auth_index(): + try: + with connect_with_password() as conn: + result = conn.execute(sqlalchemy.text("SELECT 1")).fetchall() + return f"Database connection successful (password authentication), result: {result}" + except Exception as e: + return f"Error connecting to the database (password authentication)", 500 + + +@app.route("/iam") +def iam_auth_index(): + try: + with connect_with_iam() as conn: + result = conn.execute(sqlalchemy.text("SELECT 1")).fetchall() + return f"Database connection successful (IAM authentication), result: {result}" + except Exception as e: + return f"Error connecting to the database (IAM authentication)", 500 + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8080))) diff --git a/samples/cloudrun/postgres/requirements.txt 
b/samples/cloudrun/postgres/requirements.txt new file mode 100644 index 000000000..dbd9232b5 --- /dev/null +++ b/samples/cloudrun/postgres/requirements.txt @@ -0,0 +1,5 @@ +cloud-sql-python-connector[pg8000] +sqlalchemy +Flask +gunicorn +google-cloud-secret-manager diff --git a/samples/cloudrun/sqlserver/Dockerfile b/samples/cloudrun/sqlserver/Dockerfile new file mode 100644 index 000000000..485bced95 --- /dev/null +++ b/samples/cloudrun/sqlserver/Dockerfile @@ -0,0 +1,18 @@ +# Use the official lightweight Python image. +# https://hub.docker.com/_/python +FROM python:3.14-slim + +# Allow statements and log messages to immediately appear in the Knative logs +ENV PYTHONUNBUFFERED True + +# Copy local code to the container image. +ENV APP_HOME /app +WORKDIR $APP_HOME +COPY . . + +# Install production dependencies. +RUN pip install --no-cache-dir -r requirements.txt + +# Run the web service on container startup. +# Use gunicorn for production deployments. +CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 --timeout 0 main:app diff --git a/samples/cloudrun/sqlserver/main.py b/samples/cloudrun/sqlserver/main.py new file mode 100644 index 000000000..0ce8162cf --- /dev/null +++ b/samples/cloudrun/sqlserver/main.py @@ -0,0 +1,89 @@ +""" +Copyright 2025 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import os +import sqlalchemy +from flask import Flask +from google.cloud.sql.connector import Connector, IPTypes + +# Initialize Flask app +app = Flask(__name__) + +# Connector and SQLAlchemy engine are initialized as None to allow for lazy instantiation. +# +# The Connector object is a global variable to ensure that the same connector +# instance is used across all requests. This prevents the unnecessary creation +# of new Connector instances, which is inefficient and can lead to connection +# limits being reached. +# +# Lazy instantiation (initializing the Connector and Engine only when needed) +# allows the Cloud Run service to start up faster, as it avoids performing +# initialization tasks (like fetching secrets or metadata) during startup. +connector = None +engine = None + + +def get_connection() -> sqlalchemy.engine.base.Connection: + """ + Function to create a database connection. + This function will be used by SQLAlchemy as a creator. + """ + instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"] + db_user = os.environ["DB_USER"] + db_name = os.environ["DB_NAME"] + db_password = os.environ["DB_PASSWORD"] + ip_type_str = os.environ.get("IP_TYPE", "PUBLIC") + ip_type = IPTypes[ip_type_str] + + # Connect to the database + conn = connector.connect( + instance_connection_name, + "pytds", + user=db_user, + password=db_password, + db=db_name, + ip_type=ip_type, + ) + return conn + + +def connect_to_db() -> sqlalchemy.engine.base.Connection: + """Initializes the connector and engine if necessary, then returns a connection.""" + global connector, engine + + if connector is None: + connector = Connector(refresh_strategy="lazy") + + if engine is None: + engine = sqlalchemy.create_engine( + "mssql+pytds://", + creator=get_connection, + ) + + return engine.connect() + + +@app.route("/") +def index(): + try: + with connect_to_db() as conn: + result = conn.execute(sqlalchemy.text("SELECT 1")).fetchall() + return f"Database connection successful, 
result: {result}" + except Exception as e: + return f"Error connecting to the database", 500 + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8080))) diff --git a/samples/cloudrun/sqlserver/requirements.txt b/samples/cloudrun/sqlserver/requirements.txt new file mode 100644 index 000000000..e577fbdc7 --- /dev/null +++ b/samples/cloudrun/sqlserver/requirements.txt @@ -0,0 +1,5 @@ +cloud-sql-python-connector[pytds] +sqlalchemy-pytds +google-cloud-secret-manager +Flask +gunicorn