From 83c023d81c4ce169b0f0eb87957c37f2c2ef4d5a Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 4 Oct 2022 06:08:38 +0000 Subject: [PATCH 01/25] samples: add code samples for PostgreSql dialect --- samples/samples/pg_snippets.py | 1487 ++++++++++++++++++++++++++++++++ 1 file changed, 1487 insertions(+) create mode 100644 samples/samples/pg_snippets.py diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py new file mode 100644 index 0000000000..2872d80085 --- /dev/null +++ b/samples/samples/pg_snippets.py @@ -0,0 +1,1487 @@ +#!/usr/bin/env python + +# Copyright 2022 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to do basic operations using Cloud +Spanner PostgreSql dialect. + +For more information, see the README.rst under /spanner. 
"""
import argparse
import base64
import datetime
import decimal
import json
import logging
import time

from google.cloud import spanner, spanner_admin_database_v1
from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
from google.cloud.spanner_v1 import param_types
from google.protobuf import field_mask_pb2  # type: ignore

# How long (in seconds) to block waiting on long-running admin operations.
OPERATION_TIMEOUT_SECONDS = 240


# [START spanner_postgresql_create_instance]
def create_instance(instance_id):
    """Creates an instance."""
    spanner_client = spanner.Client()

    config_name = "{}/instanceConfigs/regional-us-central1".format(
        spanner_client.project_name
    )

    instance = spanner_client.instance(
        instance_id,
        configuration_name=config_name,
        display_name="This is a display name.",
        node_count=1,
        labels={
            "cloud_spanner_samples": "true",
            "sample_name": "snippets-create_instance-explicit",
            "created": str(int(time.time())),
        },
    )

    operation = instance.create()

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print("Created instance {}".format(instance_id))


# [END spanner_postgresql_create_instance]


# [START spanner_postgresql_create_database]
def create_database(instance_id, database_id):
    """Creates a PostgreSql database and tables for sample data."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    # The database is created with the PostgreSQL dialect; the sample tables
    # are added in a separate DDL step below.
    database = instance.database(
        database_id,
        database_dialect=DatabaseDialect.POSTGRESQL,
    )

    operation = database.create()

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    create_table_using_ddl(database.name)
    print("Created database {} on instance {}".format(database_id, instance_id))


def create_table_using_ddl(database_name):
    # Creates the sample Singers and Albums tables using PostgreSQL-dialect
    # DDL on an existing database.
    spanner_client = spanner.Client()
    request = spanner_admin_database_v1.UpdateDatabaseDdlRequest(
        database=database_name,
        statements=[
            """CREATE TABLE Singers (
  SingerId bigint NOT NULL,
  FirstName character varying(1024),
  LastName character varying(1024),
  SingerInfo bytea,
  PRIMARY KEY (SingerId)
  )""",
            """CREATE TABLE Albums (
  SingerId bigint NOT NULL,
  AlbumId bigint NOT NULL,
  AlbumTitle character varying(1024),
  PRIMARY KEY (SingerId, AlbumId)
  ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""",
        ],
    )
    operation = spanner_client.database_admin_api.update_database_ddl(request)
    operation.result(OPERATION_TIMEOUT_SECONDS)


# [END spanner_postgresql_create_database]


# [START spanner_postgresql_insert_data]
def insert_data(instance_id, database_id):
    """Inserts sample data into the given database.

    The database and table must already exist and can be created using
    `create_database`.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.batch() as batch:
        batch.insert(
            table="Singers",
            columns=("SingerId", "FirstName", "LastName"),
            values=[
                (1, u"Marc", u"Richards"),
                (2, u"Catalina", u"Smith"),
                (3, u"Alice", u"Trentor"),
                (4, u"Lea", u"Martin"),
                (5, u"David", u"Lomond"),
            ],
        )

        batch.insert(
            table="Albums",
            columns=("SingerId", "AlbumId", "AlbumTitle"),
            values=[
                (1, 1, u"Total Junk"),
                (1, 2, u"Go, Go, Go"),
                (2, 1, u"Green"),
                (2, 2, u"Forever Hold Your Peace"),
                (2, 3, u"Terrified"),
            ],
        )

    print("Inserted data.")


# [END spanner_postgresql_insert_data]


# [START spanner_postgresql_delete_data]
def delete_data(instance_id, database_id):
    """Deletes sample data from the given database.

    The database, table, and data must already exist and can be created using
    `create_database` and `insert_data`.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    # Delete individual rows
    albums_to_delete = spanner.KeySet(keys=[[2, 1], [2, 3]])

    # Delete a range of rows where the column key is >=3 and <5
    singers_range = spanner.KeyRange(start_closed=[3], end_open=[5])
    singers_to_delete = spanner.KeySet(ranges=[singers_range])

    # Delete remaining Singers rows, which will also delete the remaining
    # Albums rows because Albums was defined with ON DELETE CASCADE
    remaining_singers = spanner.KeySet(all_=True)

    with database.batch() as batch:
        batch.delete("Albums", albums_to_delete)
        batch.delete("Singers", singers_to_delete)
        batch.delete("Singers", remaining_singers)

    print("Deleted data.")


# [END spanner_postgresql_delete_data]


# [START spanner_postgresql_query_data]
def query_data(instance_id, database_id):
    """Queries sample data from the database using SQL."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.snapshot() as snapshot:
        results = snapshot.execute_sql(
            "SELECT SingerId, AlbumId, AlbumTitle FROM Albums"
        )

        for row in results:
            print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))


# [END spanner_postgresql_query_data]


# [START spanner_postgresql_read_data]
def read_data(instance_id, database_id):
    """Reads sample data from the database."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.snapshot() as snapshot:
        keyset = spanner.KeySet(all_=True)
        results = snapshot.read(
            table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset
        )

        for row in results:
            print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))


# [END spanner_postgresql_read_data]
# [START spanner_postgresql_add_column]
def add_column(instance_id, database_id):
    """Adds a new column to the Albums table in the example database."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    operation = database.update_ddl(
        ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"]
    )

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print("Added the MarketingBudget column.")


# [END spanner_postgresql_add_column]


# [START spanner_postgresql_update_data]
def update_data(instance_id, database_id):
    """Updates sample data in the database.

    This updates the `MarketingBudget` column which must be created before
    running this sample. You can add the column by running the `add_column`
    sample or by running this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT

    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.batch() as batch:
        batch.update(
            table="Albums",
            columns=("SingerId", "AlbumId", "MarketingBudget"),
            values=[(1, 1, 100000), (2, 2, 500000)],
        )

    print("Updated data.")


# [END spanner_postgresql_update_data]


# [START spanner_postgresql_read_write_transaction]
def read_write_transaction(instance_id, database_id):
    """Performs a read-write transaction to update two sample records in the
    database.

    This will transfer 200,000 from the `MarketingBudget` field for the second
    Album to the first Album. If the `MarketingBudget` is too low, it will
    raise an exception.

    Before running this sample, you will need to run the `update_data` sample
    to populate the fields.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    def update_albums(transaction):
        # Read the second album budget.
        second_album_keyset = spanner.KeySet(keys=[(2, 2)])
        second_album_result = transaction.read(
            table="Albums",
            columns=("MarketingBudget",),
            keyset=second_album_keyset,
            limit=1,
        )
        second_album_row = list(second_album_result)[0]
        second_album_budget = second_album_row[0]

        transfer_amount = 200000

        if second_album_budget < transfer_amount:
            # Raising an exception will automatically roll back the
            # transaction.
            raise ValueError("The second album doesn't have enough funds to transfer")

        # Read the first album's budget.
        first_album_keyset = spanner.KeySet(keys=[(1, 1)])
        first_album_result = transaction.read(
            table="Albums",
            columns=("MarketingBudget",),
            keyset=first_album_keyset,
            limit=1,
        )
        first_album_row = list(first_album_result)[0]
        first_album_budget = first_album_row[0]

        # Update the budgets.
        second_album_budget -= transfer_amount
        first_album_budget += transfer_amount
        print(
            "Setting first album's budget to {} and the second album's "
            "budget to {}.".format(first_album_budget, second_album_budget)
        )

        # Update the rows.
        transaction.update(
            table="Albums",
            columns=("SingerId", "AlbumId", "MarketingBudget"),
            values=[(1, 1, first_album_budget), (2, 2, second_album_budget)],
        )

    database.run_in_transaction(update_albums)

    print("Transaction complete.")


# [END spanner_postgresql_read_write_transaction]


# [START spanner_postgresql_query_data_with_new_column]
def query_data_with_new_column(instance_id, database_id):
    """Queries sample data from the database using SQL.

    This sample uses the `MarketingBudget` column. You can add the column
    by running the `add_column` sample or by running this DDL statement against
    your database:

        ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.snapshot() as snapshot:
        results = snapshot.execute_sql(
            "SELECT SingerId, AlbumId, MarketingBudget FROM Albums"
        )

        for row in results:
            print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))


# [END spanner_postgresql_query_data_with_new_column]


# [START spanner_postgresql_create_index]
def add_index(instance_id, database_id):
    """Adds a simple index to the example database."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    operation = database.update_ddl(
        ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"]
    )

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print("Added the AlbumsByAlbumTitle index.")


# [END spanner_postgresql_create_index]

# [START spanner_postgresql_read_data_with_index]
def read_data_with_index(instance_id, database_id):
    """Reads sample data from the database using an index.

    The index must exist before running this sample.
You can add the index + by running the `add_index` sample or by running this DDL statement against + your database: + + CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle) + + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", + columns=("AlbumId", "AlbumTitle"), + keyset=keyset, + index="AlbumsByAlbumTitle", + ) + + for row in results: + print("AlbumId: {}, AlbumTitle: {}".format(*row)) + + +# [END spanner_postgresql_read_data_with_index] + + +# [START spanner_postgresql_create_storing_index] +def add_storing_index(instance_id, database_id): + """Adds an storing index to the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + operation = database.update_ddl( + [ + "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" + "INCLUDE (MarketingBudget)" + ] + ) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print("Added the AlbumsByAlbumTitle2 index.") + + +# [END spanner_postgresql_create_storing_index] + + +# [START spanner_postgresql_read_data_with_storing_index] +def read_data_with_storing_index(instance_id, database_id): + """Reads sample data from the database using an index with a storing + clause. + + The index must exist before running this sample. 
You can add the index + by running the `add_scoring_index` sample or by running this DDL statement + against your database: + + CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) + INCLUDE (MarketingBudget) + + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", + columns=("AlbumId", "AlbumTitle", "MarketingBudget"), + keyset=keyset, + index="AlbumsByAlbumTitle2", + ) + + for row in results: + print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + + +# [END spanner_postgresql_read_data_with_storing_index] + + +# [START spanner_postgresql_read_only_transaction] +def read_only_transaction(instance_id, database_id): + """Reads data inside of a read-only transaction. + + Within the read-only transaction, or "snapshot", the application sees + consistent view of the database at a particular timestamp. + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot(multi_use=True) as snapshot: + # Read using SQL. + results = snapshot.execute_sql( + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + ) + + print("Results from first read:") + for row in results: + print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + + # Perform another read using the `read` method. Even if the data + # is updated in-between the reads, the snapshot ensures that both + # return the same data. 
        keyset = spanner.KeySet(all_=True)
        results = snapshot.read(
            table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset
        )

        print("Results from second read:")
        for row in results:
            print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))


# [END spanner_postgresql_read_only_transaction]


def insert_with_dml(instance_id, database_id):
    """Inserts data with a DML statement into the database. """
    # [START spanner_postgresql_dml_getting_started_insert]
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    def insert_singers(transaction):
        row_ct = transaction.execute_update(
            "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES "
            "(12, 'Melissa', 'Garcia'), "
            "(13, 'Russell', 'Morales'), "
            "(14, 'Jacqueline', 'Long'), "
            "(15, 'Dylan', 'Shaw')"
        )
        print("{} record(s) inserted.".format(row_ct))

    database.run_in_transaction(insert_singers)
    # [END spanner_postgresql_dml_getting_started_insert]


def query_data_with_parameter(instance_id, database_id):
    """Queries sample data from the database using SQL with a parameter."""
    # [START spanner_postgresql_query_with_parameter]
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    with database.snapshot() as snapshot:
        # PostgreSQL-dialect statements use positional placeholders ($1);
        # the client maps the "p1" params entry onto $1.
        results = snapshot.execute_sql(
            "SELECT SingerId, FirstName, LastName FROM Singers "
            "WHERE LastName = $1",
            params={"p1": "Garcia"},
            param_types={"p1": spanner.param_types.STRING},
        )

        for row in results:
            print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row))
    # [END spanner_postgresql_query_with_parameter]


def write_with_dml_transaction(instance_id, database_id):
    """ Transfers part of a marketing budget from one album to another. """
    # [START spanner_postgresql_dml_getting_started_update]
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"

    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    def transfer_budget(transaction):
        # Transfer marketing budget from one album to another. Performed in a
        # single transaction to ensure that the transfer is atomic.
        second_album_result = transaction.execute_sql(
            "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2"
        )
        second_album_row = list(second_album_result)[0]
        second_album_budget = second_album_row[0]

        transfer_amount = 200000

        # Transaction will only be committed if this condition still holds at
        # the time of commit. Otherwise it will be aborted and the callable
        # will be rerun by the client library
        if second_album_budget >= transfer_amount:
            first_album_result = transaction.execute_sql(
                "SELECT MarketingBudget from Albums "
                "WHERE SingerId = 1 and AlbumId = 1"
            )
            first_album_row = list(first_album_result)[0]
            first_album_budget = first_album_row[0]

            second_album_budget -= transfer_amount
            first_album_budget += transfer_amount

            # Update first album
            transaction.execute_update(
                "UPDATE Albums "
                "SET MarketingBudget = $1 "
                "WHERE SingerId = 1 and AlbumId = 1",
                params={"p1": first_album_budget},
                param_types={"p1": spanner.param_types.INT64},
            )

            # Update second album
            transaction.execute_update(
                "UPDATE Albums "
                "SET MarketingBudget = $1 "
                "WHERE SingerId = 2 and AlbumId = 2",
                params={"p1": second_album_budget},
                param_types={"p1": spanner.param_types.INT64},
            )

            print(
                "Transferred {} from Album2's budget to Album1's".format(
                    transfer_amount
                )
            )

    database.run_in_transaction(transfer_budget)
    # [END spanner_postgresql_dml_getting_started_update]
# [START spanner_postgresql_read_stale_data]
def read_stale_data(instance_id, database_id):
    """Reads sample data from the database. The data is exactly 15 seconds
    stale."""
    import datetime

    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)
    staleness = datetime.timedelta(seconds=15)

    with database.snapshot(exact_staleness=staleness) as snapshot:
        keyset = spanner.KeySet(all_=True)
        results = snapshot.read(
            table="Albums",
            columns=("SingerId", "AlbumId", "MarketingBudget"),
            keyset=keyset,
        )

        for row in results:
            print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))


# [END spanner_postgresql_read_stale_data]


# [START spanner_postgresql_update_data_with_timestamp_column]
def update_data_with_timestamp(instance_id, database_id):
    """Updates the Albums table, writing the commit timestamp into its
    LastUpdateTime column.

    This updates the `MarketingBudget` column which must be created before
    running this sample. You can add the column by running the `add_column`
    sample or by running this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT

    In addition this update expects the LastUpdateTime column added by
    applying this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    database = instance.database(database_id)

    with database.batch() as batch:
        batch.update(
            table="Albums",
            columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"),
            values=[
                (1, 1, 1000000, spanner.COMMIT_TIMESTAMP),
                (2, 2, 750000, spanner.COMMIT_TIMESTAMP),
            ],
        )

    print("Updated data.")


# [END spanner_postgresql_update_data_with_timestamp_column]


# [START spanner_postgresql_add_timestamp_column]
def add_timestamp_column(instance_id, database_id):
    """ Adds a new TIMESTAMP column to the Albums table in the example database.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    database = instance.database(database_id)

    operation = database.update_ddl(
        [
            "ALTER TABLE Albums ADD COLUMN LastUpdateTime "
            "SPANNER.COMMIT_TIMESTAMP"
        ]
    )

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print(
        'Altered table "Albums" on database {} on instance {}.'.format(
            database_id, instance_id
        )
    )


# [END spanner_postgresql_add_timestamp_column]


# [START spanner_postgresql_query_data_with_timestamp_column]
def query_data_with_timestamp(instance_id, database_id):
    """Queries sample data from the database using SQL.

    This reads the `LastUpdateTime` column which must be created before
    running this sample. You can add the column by running the
    `add_timestamp_column` sample or by running this DDL statement
    against your database:

        ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP

    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    database = instance.database(database_id)

    with database.snapshot() as snapshot:
        results = snapshot.execute_sql(
            "SELECT SingerId, AlbumId, MarketingBudget FROM Albums "
            "ORDER BY LastUpdateTime DESC"
        )

        for row in results:
            print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))


# [END spanner_postgresql_query_data_with_timestamp_column]


# [START spanner_postgresql_create_table_with_timestamp_column]
def create_table_with_timestamp(instance_id, database_id):
    """Creates a table with a COMMIT_TIMESTAMP column."""

    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    request = spanner_admin_database_v1.UpdateDatabaseDdlRequest(
        database=database.name,
        statements=[
            """CREATE TABLE Performances (
  SingerId BIGINT NOT NULL,
  VenueId BIGINT NOT NULL,
  EventDate Date,
  Revenue BIGINT,
  LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL,
  PRIMARY KEY (SingerId, VenueId, EventDate))
  INTERLEAVE IN PARENT Singers ON DELETE CASCADE"""
        ],
    )
    operation = spanner_client.database_admin_api.update_database_ddl(request)

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print(
        "Created Performances table on database {} on instance {}".format(
            database_id, instance_id
        )
    )


# [END spanner_postgresql_create_table_with_timestamp_column]


# [START spanner_postgresql_insert_data_with_timestamp_column]
def insert_data_with_timestamp(instance_id, database_id):
    """Inserts data with a COMMIT_TIMESTAMP field into a table.
""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + with database.batch() as batch: + batch.insert( + table="Performances", + columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), + values=[ + (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), + (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), + (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), + ], + ) + + print("Inserted data.") + + +# [END spanner_postgresql_insert_data_with_timestamp_column] + + +def insert_data_with_dml(instance_id, database_id): + """Inserts sample data into the given database using a DML statement. """ + # [START spanner_postgresql_dml_standard_insert] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def insert_singers(transaction): + row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (10, 'Virginia', 'Watson')" + ) + + print("{} record(s) inserted.".format(row_ct)) + + database.run_in_transaction(insert_singers) + # [END spanner_postgresql_dml_standard_insert] + + + +def update_data_with_dml(instance_id, database_id): + """Updates sample data from the database using a DML statement. 
""" + # [START spanner_postgresql_dml_standard_update] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def update_albums(transaction): + row_ct = transaction.execute_update( + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 1" + ) + + print("{} record(s) updated.".format(row_ct)) + + database.run_in_transaction(update_albums) + # [END spanner_postgresql_dml_standard_update] + + +def delete_data_with_dml(instance_id, database_id): + """Deletes sample data from the database using a DML statement. """ + # [START spanner_postgresql_dml_standard_delete] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def delete_singers(transaction): + row_ct = transaction.execute_update( + "DELETE FROM Singers WHERE FirstName = 'Alice'" + ) + + print("{} record(s) deleted.".format(row_ct)) + + database.run_in_transaction(delete_singers) + # [END spanner_postgresql_dml_standard_delete] + + +def dml_write_read_transaction(instance_id, database_id): + """First inserts data then reads it from within a transaction using DML.""" + # [START spanner_postgresql_dml_write_then_read] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def write_then_read(transaction): + # Insert record. + row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (11, 'Timothy', 'Campbell')" + ) + print("{} record(s) inserted.".format(row_ct)) + + # Read newly inserted record. 
+ results = transaction.execute_sql( + "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" + ) + for result in results: + print("FirstName: {}, LastName: {}".format(*result)) + + database.run_in_transaction(write_then_read) + # [END spanner_postgresql_dml_write_then_read] + + +def update_data_with_partitioned_dml(instance_id, database_id): + """ Update sample data with a partitioned DML statement. """ + # [START spanner_postgresql_dml_partitioned_update] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + row_ct = database.execute_partitioned_dml( + "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" + ) + + print("{} records updated.".format(row_ct)) + # [END spanner_postgresql_dml_partitioned_update] + + +def delete_data_with_partitioned_dml(instance_id, database_id): + """ Delete sample data with a partitioned DML statement. """ + # [START spanner_postgresql_dml_partitioned_delete] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") + + print("{} record(s) deleted.".format(row_ct)) + # [END spanner_postgresql_dml_partitioned_delete] + + +def update_with_batch_dml(instance_id, database_id): + """Updates sample data in the database using Batch DML. 
""" + # [START spanner_postgresql_dml_batch_update] + from google.rpc.code_pb2 import OK + + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + insert_statement = ( + "INSERT INTO Albums " + "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " + "VALUES (1, 3, 'Test Album Title', 10000)" + ) + + update_statement = ( + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 3" + ) + + def update_albums(transaction): + status, row_cts = transaction.batch_update([insert_statement, update_statement]) + + if status.code != OK: + # Do handling here. + # Note: the exception will still be raised when + # `commit` is called by `run_in_transaction`. + return + + print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) + + database.run_in_transaction(update_albums) + # [END spanner_postgresql_dml_batch_update] + + +def create_table_with_datatypes(instance_id, database_id): + """Creates a table with supported datatypes. 
""" + # [START spanner_postgresql_create_table_with_datatypes] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=database.name, + statements= + [ + """CREATE TABLE Venues ( + VenueId BIGINT NOT NULL, + VenueName character varying(100), + VenueInfo BYTEA, + Capacity BIGINT, + OutdoorVenue BOOL, + PopularityScore FLOAT8, + Revenue NUMERIC, + LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, + PRIMARY KEY (VenueId))""" + ], + ) + operation = spanner_client.database_admin_api.update_database_ddl(request) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) + ) + # [END spanner_postgresql_create_table_with_datatypes] + + +def insert_datatypes_data(instance_id, database_id): + """Inserts data with supported datatypes into a table. 
""" + # [START spanner_postgresql_insert_datatypes_data] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBytes1 = base64.b64encode(u"Hello World 1".encode()) + exampleBytes2 = base64.b64encode(u"Hello World 2".encode()) + exampleBytes3 = base64.b64encode(u"Hello World 3".encode()) + with database.batch() as batch: + batch.insert( + table="Venues", + columns=( + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", + "OutdoorVenue", + "PopularityScore", + "Revenue", + "LastUpdateTime", + ), + values=[ + ( + 4, + "Venue 4", + exampleBytes1, + 1800, + False, + 0.85543, + decimal.Decimal("215100.10"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 19, + "Venue 19", + exampleBytes2, + 6300, + True, + 0.98716, + decimal.Decimal("1200100.00"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + "Venue 42", + exampleBytes3, + 3000, + False, + 0.72598, + decimal.Decimal("390650.99"), + spanner.COMMIT_TIMESTAMP, + ), + ], + ) + + print("Inserted data.") + # [END spanner_postgresql_insert_datatypes_data] + + +def query_data_with_bool(instance_id, database_id): + """Queries sample data using SQL with a BOOL parameter. 
""" + # [START spanner_postgresql_query_with_bool_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBool = True + param = {"p1": exampleBool} + param_type = {"p1": param_types.BOOL} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE OutdoorVenue = $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) + # [END spanner_postgresql_query_with_bool_parameter] + + +def query_data_with_bytes(instance_id, database_id): + """Queries sample data using SQL with a BYTES parameter. """ + # [START spanner_postgresql_query_with_bytes_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBytes = base64.b64encode(u"Hello World 1".encode()) + param = {"p1": exampleBytes} + param_type = {"p1": param_types.BYTES} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}".format(*row)) + # [END spanner_postgresql_query_with_bytes_parameter] + + +def query_data_with_float(instance_id, database_id): + """Queries sample data using SQL with a FLOAT8 parameter. 
""" + # [START spanner_postgresql_query_with_float_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleFloat = 0.8 + param = {"p1": exampleFloat} + param_type = {"p1": param_types.FLOAT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + # [END spanner_postgresql_query_with_float_parameter] + + +def query_data_with_int(instance_id, database_id): + """Queries sample data using SQL with a BIGINT parameter. """ + # [START spanner_postgresql_query_with_int_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleInt = 3000 + param = {"p1": exampleInt} + param_type = {"p1": param_types.INT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, Capacity FROM Venues " + "WHERE Capacity >= $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) + # [END spanner_postgresql_query_with_int_parameter] + + +def query_data_with_string(instance_id, database_id): + """Queries sample data using SQL with a STRING parameter. 
""" + # [START spanner_postgresql_query_with_string_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleString = "Venue 42" + param = {"p1": exampleString} + param_type = {"p1": param_types.STRING} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}".format(*row)) + # [END spanner_postgresql_query_with_string_parameter] + + +def query_data_with_timestamp_parameter(instance_id, database_id): + """Queries sample data using SQL with a TIMESTAMPTZ parameter. """ + # [START spanner_postgresql_query_with_timestamp_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + example_timestamp = datetime.datetime.utcnow().isoformat() + "Z" + # [END spanner_postgresql_query_with_timestamp_parameter] + # Avoid time drift on the local machine. + # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. 
+ example_timestamp = ( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + "Z" + # [START spanner_postgresql_query_with_timestamp_parameter] + param = {"p1": example_timestamp} + param_type = {"p1": param_types.TIMESTAMP} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_query_with_timestamp_parameter] + + +def query_data_with_numeric_parameter(instance_id, database_id): + """Queries sample data using SQL with a NUMERIC parameter. """ + # [START spanner_postgresql_query_with_numeric_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + example_numeric = decimal.Decimal("300000") + param = {"p1": example_numeric} + param_type = {"p1": param_types.NUMERIC} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", + params=param, + param_types=param_type, + ) + + for row in results: + print(u"VenueId: {}, Revenue: {}".format(*row)) + # [END spanner_postgresql_query_with_numeric_parameter] + + +def create_client_with_query_options(instance_id, database_id): + """Create a client with query options.""" + # [START spanner_postgresql_create_client_with_query_options] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client( + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + } + ) + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = 
snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_create_client_with_query_options] + + +def query_data_with_query_options(instance_id, database_id): + """Queries sample data using SQL with query options.""" + # [START spanner_postgresql_query_with_query_options] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + }, + ) + + for row in results: + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_query_with_query_options] + + +if __name__ == "__main__": # noqa: C901 + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") + parser.add_argument( + "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + ) + + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("create_instance", help=create_instance.__doc__) + subparsers.add_parser("create_database", help=create_database.__doc__) + subparsers.add_parser("insert_data", help=insert_data.__doc__) + subparsers.add_parser("delete_data", help=delete_data.__doc__) + subparsers.add_parser("query_data", help=query_data.__doc__) + subparsers.add_parser("read_data", help=read_data.__doc__) + subparsers.add_parser("read_stale_data", help=read_stale_data.__doc__) + subparsers.add_parser("add_column", help=add_column.__doc__) + subparsers.add_parser("update_data", 
help=update_data.__doc__) + subparsers.add_parser( + "query_data_with_new_column", help=query_data_with_new_column.__doc__ + ) + subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) + subparsers.add_parser("add_index", help=add_index.__doc__) + subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) + subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) + subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) + subparsers.add_parser( + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + ) + subparsers.add_parser( + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + ) + subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) + subparsers.add_parser( + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + ) + subparsers.add_parser( + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + ) + subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) + subparsers.add_parser( + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + ) + subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) + subparsers.add_parser( + "query_data_with_parameter", help=query_data_with_parameter.__doc__ + ) + subparsers.add_parser( + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + ) + subparsers.add_parser( + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, + ) + subparsers.add_parser( + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, + ) + subparsers.add_parser("update_with_batch_dml", 
help=update_with_batch_dml.__doc__) + subparsers.add_parser( + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + ) + subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) + subparsers.add_parser( + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, + ) + subparsers.add_parser( + "query_data_with_numeric_parameter", + help=query_data_with_numeric_parameter.__doc__, + ) + subparsers.add_parser( + "query_data_with_query_options", help=query_data_with_query_options.__doc__ + ) + subparsers.add_parser( + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, + ) + + args = parser.parse_args() + + if args.command == "create_instance": + create_instance(args.instance_id) + elif args.command == "create_database": + create_database(args.instance_id, args.database_id) + elif args.command == "insert_data": + insert_data(args.instance_id, args.database_id) + elif args.command == "delete_data": + delete_data(args.instance_id, args.database_id) + elif args.command == "query_data": + query_data(args.instance_id, args.database_id) + elif args.command == "read_data": + read_data(args.instance_id, args.database_id) + elif args.command == "read_stale_data": + read_stale_data(args.instance_id, args.database_id) + elif args.command == "add_column": + add_column(args.instance_id, args.database_id) + elif args.command == "update_data": + update_data(args.instance_id, args.database_id) + elif args.command == "query_data_with_new_column": + 
query_data_with_new_column(args.instance_id, args.database_id) + elif args.command == "read_write_transaction": + read_write_transaction(args.instance_id, args.database_id) + elif args.command == "read_only_transaction": + read_only_transaction(args.instance_id, args.database_id) + elif args.command == "add_index": + add_index(args.instance_id, args.database_id) + elif args.command == "read_data_with_index": + read_data_with_index(args.instance_id, args.database_id) + elif args.command == "add_storing_index": + add_storing_index(args.instance_id, args.database_id) + elif args.command == "read_data_with_storing_index": + read_data_with_storing_index(args.instance_id, args.database_id) + elif args.command == "create_table_with_timestamp": + create_table_with_timestamp(args.instance_id, args.database_id) + elif args.command == "insert_data_with_timestamp": + insert_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "add_timestamp_column": + add_timestamp_column(args.instance_id, args.database_id) + elif args.command == "update_data_with_timestamp": + update_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "query_data_with_timestamp": + query_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "insert_data_with_dml": + insert_data_with_dml(args.instance_id, args.database_id) + elif args.command == "update_data_with_dml": + update_data_with_dml(args.instance_id, args.database_id) + elif args.command == "delete_data_with_dml": + delete_data_with_dml(args.instance_id, args.database_id) + elif args.command == "dml_write_read_transaction": + dml_write_read_transaction(args.instance_id, args.database_id) + elif args.command == "insert_with_dml": + insert_with_dml(args.instance_id, args.database_id) + elif args.command == "query_data_with_parameter": + query_data_with_parameter(args.instance_id, args.database_id) + elif args.command == "write_with_dml_transaction": + 
write_with_dml_transaction(args.instance_id, args.database_id) + elif args.command == "update_data_with_partitioned_dml": + update_data_with_partitioned_dml(args.instance_id, args.database_id) + elif args.command == "delete_data_with_partitioned_dml": + delete_data_with_partitioned_dml(args.instance_id, args.database_id) + elif args.command == "update_with_batch_dml": + update_with_batch_dml(args.instance_id, args.database_id) + elif args.command == "create_table_with_datatypes": + create_table_with_datatypes(args.instance_id, args.database_id) + elif args.command == "insert_datatypes_data": + insert_datatypes_data(args.instance_id, args.database_id) + elif args.command == "query_data_with_bool": + query_data_with_bool(args.instance_id, args.database_id) + elif args.command == "query_data_with_bytes": + query_data_with_bytes(args.instance_id, args.database_id) + elif args.command == "query_data_with_date": + query_data_with_date(args.instance_id, args.database_id) + elif args.command == "query_data_with_float": + query_data_with_float(args.instance_id, args.database_id) + elif args.command == "query_data_with_int": + query_data_with_int(args.instance_id, args.database_id) + elif args.command == "query_data_with_string": + query_data_with_string(args.instance_id, args.database_id) + elif args.command == "query_data_with_timestamp_parameter": + query_data_with_timestamp_parameter(args.instance_id, args.database_id) + elif args.command == "query_data_with_query_options": + query_data_with_query_options(args.instance_id, args.database_id) + elif args.command == "create_client_with_query_options": + create_client_with_query_options(args.instance_id, args.database_id) From 2b87cb05856581209b8c8516b26dac028b1d0745 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Tue, 4 Oct 2022 13:07:03 +0530 Subject: [PATCH 02/25] linting --- samples/samples/autocommit.py | 7 +- samples/samples/autocommit_test.py | 3 +- samples/samples/backup_sample_test.py | 40 +- 
samples/samples/batch_sample.py | 4 +- samples/samples/conftest.py | 11 +- samples/samples/noxfile.py | 15 +- samples/samples/pg_snippets.py | 2207 ++++++++++++------------- samples/samples/snippets.py | 201 +-- samples/samples/snippets_test.py | 32 +- 9 files changed, 1280 insertions(+), 1240 deletions(-) diff --git a/samples/samples/autocommit.py b/samples/samples/autocommit.py index d5c44b0c53..873ed2b7bd 100644 --- a/samples/samples/autocommit.py +++ b/samples/samples/autocommit.py @@ -46,11 +46,14 @@ def enable_autocommit_mode(instance_id, database_id): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", default="example_db", + "--database-id", + help="Your Cloud Spanner database ID.", + default="example_db", ) subparsers = parser.add_subparsers(dest="command") subparsers.add_parser("enable_autocommit_mode", help=enable_autocommit_mode.__doc__) diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py index 6b102da8fe..8150058f1c 100644 --- a/samples/samples/autocommit_test.py +++ b/samples/samples/autocommit_test.py @@ -25,7 +25,8 @@ def test_enable_autocommit_mode(capsys, instance_id, sample_database): op.result() autocommit.enable_autocommit_mode( - instance_id, sample_database.database_id, + instance_id, + sample_database.database_id, ) out, _ = capsys.readouterr() assert "Autocommit mode is enabled." 
in out diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index da50fbba46..5f094e7a77 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -26,12 +26,12 @@ def sample_name(): def unique_database_id(): - """ Creates a unique id for the database. """ + """Creates a unique id for the database.""" return f"test-db-{uuid.uuid4().hex[:10]}" def unique_backup_id(): - """ Creates a unique id for the backup. """ + """Creates a unique id for the backup.""" return f"test-backup-{uuid.uuid4().hex[:10]}" @@ -52,7 +52,10 @@ def test_create_backup(capsys, instance_id, sample_database): version_time = list(results)[0][0] backup_sample.create_backup( - instance_id, sample_database.database_id, BACKUP_ID, version_time, + instance_id, + sample_database.database_id, + BACKUP_ID, + version_time, ) out, _ = capsys.readouterr() assert BACKUP_ID in out @@ -74,10 +77,16 @@ def test_copy_backup(capsys, instance_id, spanner_client): @pytest.mark.dependency(name="create_backup_with_encryption_key") def test_create_backup_with_encryption_key( - capsys, instance_id, sample_database, kms_key_name, + capsys, + instance_id, + sample_database, + kms_key_name, ): backup_sample.create_backup_with_encryption_key( - instance_id, sample_database.database_id, CMEK_BACKUP_ID, kms_key_name, + instance_id, + sample_database.database_id, + CMEK_BACKUP_ID, + kms_key_name, ) out, _ = capsys.readouterr() assert CMEK_BACKUP_ID in out @@ -97,7 +106,10 @@ def test_restore_database(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["create_backup_with_encryption_key"]) @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database_with_encryption_key( - capsys, instance_id, sample_database, kms_key_name, + capsys, + instance_id, + sample_database, + kms_key_name, ): backup_sample.restore_database_with_encryption_key( instance_id, CMEK_RESTORE_DB_ID, CMEK_BACKUP_ID, kms_key_name @@ -123,10 
+135,14 @@ def test_list_backup_operations(capsys, instance_id, sample_database): @pytest.mark.dependency(name="list_backup", depends=["create_backup", "copy_backup"]) def test_list_backups( - capsys, instance_id, sample_database, + capsys, + instance_id, + sample_database, ): backup_sample.list_backups( - instance_id, sample_database.database_id, BACKUP_ID, + instance_id, + sample_database.database_id, + BACKUP_ID, ) out, _ = capsys.readouterr() id_count = out.count(BACKUP_ID) @@ -153,7 +169,9 @@ def test_delete_backup(capsys, instance_id): @pytest.mark.dependency(depends=["create_backup"]) def test_cancel_backup(capsys, instance_id, sample_database): backup_sample.cancel_backup( - instance_id, sample_database.database_id, BACKUP_ID, + instance_id, + sample_database.database_id, + BACKUP_ID, ) out, _ = capsys.readouterr() cancel_success = "Backup creation was successfully cancelled." in out @@ -166,7 +184,9 @@ def test_cancel_backup(capsys, instance_id, sample_database): @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_create_database_with_retention_period(capsys, sample_instance): backup_sample.create_database_with_version_retention_period( - sample_instance.instance_id, RETENTION_DATABASE_ID, RETENTION_PERIOD, + sample_instance.instance_id, + RETENTION_DATABASE_ID, + RETENTION_PERIOD, ) out, _ = capsys.readouterr() assert (RETENTION_DATABASE_ID + " created with ") in out diff --git a/samples/samples/batch_sample.py b/samples/samples/batch_sample.py index 553dc31517..73d9f5667e 100644 --- a/samples/samples/batch_sample.py +++ b/samples/samples/batch_sample.py @@ -57,7 +57,7 @@ def run_batch_query(instance_id, database_id): for future in concurrent.futures.as_completed(futures, timeout=3600): finish, row_ct = future.result() elapsed = finish - start - print(u"Completed {} rows in {} seconds".format(row_ct, elapsed)) + print("Completed {} rows in {} seconds".format(row_ct, elapsed)) # Clean up snapshot.close() @@ -68,7 +68,7 @@ def process(snapshot, 
partition): print("Started processing partition.") row_ct = 0 for row in snapshot.process_read_batch(partition): - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) row_ct += 1 return time.time(), row_ct diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index 314c984920..c745afa151 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -31,7 +31,7 @@ @pytest.fixture(scope="module") def sample_name(): - """ Sample testcase modules must define this fixture. + """Sample testcase modules must define this fixture. The name is used to label the instance created by the sample, to aid in debugging leaked instances. @@ -98,7 +98,11 @@ def multi_region_instance_config(spanner_client): @pytest.fixture(scope="module") def sample_instance( - spanner_client, cleanup_old_instances, instance_id, instance_config, sample_name, + spanner_client, + cleanup_old_instances, + instance_id, + instance_config, + sample_name, ): sample_instance = spanner_client.instance( instance_id, @@ -184,7 +188,8 @@ def database_ddl(): def sample_database(sample_instance, database_id, database_ddl): sample_database = sample_instance.database( - database_id, ddl_statements=database_ddl, + database_id, + ddl_statements=database_ddl, ) if not sample_database.exists(): diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py index 0398d72ff6..b053ca568f 100644 --- a/samples/samples/noxfile.py +++ b/samples/samples/noxfile.py @@ -180,6 +180,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -207,7 +208,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", 
recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -229,9 +232,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -244,9 +245,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -276,7 +277,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 2872d80085..43b19b022f 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -38,31 +38,31 @@ # [START spanner_postgresql_create_instance] def create_instance(instance_id): - """Creates an instance.""" - spanner_client = spanner.Client() + """Creates an instance.""" + spanner_client = spanner.Client() - config_name = "{}/instanceConfigs/regional-us-central1".format( - spanner_client.project_name - ) + config_name = "{}/instanceConfigs/regional-us-central1".format( + spanner_client.project_name + ) - instance = spanner_client.instance( - instance_id, - configuration_name=config_name, - display_name="This is a display name.", - node_count=1, - labels={ - "cloud_spanner_samples": "true", - "sample_name": "snippets-create_instance-explicit", - "created": str(int(time.time())), - }, - ) + instance = spanner_client.instance( + instance_id, + configuration_name=config_name, + display_name="This is a display name.", + node_count=1, + labels={ + "cloud_spanner_samples": "true", + "sample_name": "snippets-create_instance-explicit", + "created": str(int(time.time())), + }, + ) - operation = instance.create() + operation = instance.create() - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print("Created instance {}".format(instance_id)) + print("Created instance {}".format(instance_id)) # [END spanner_postgresql_create_instance] @@ -70,87 +70,88 @@ def create_instance(instance_id): # [START spanner_postgresql_create_database] def create_database(instance_id, database_id): - """Creates a PostgreSql database and tables for sample data.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) + """Creates a PostgreSql database and tables for sample data.""" 
+ spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) - database = instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, - ) + database = instance.database( + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, + ) - operation = database.create() + operation = database.create() - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - create_table_using_ddl(database.name) - print("Created database {} on instance {}".format(database_id, instance_id)) + create_table_using_ddl(database.name) + print("Created database {} on instance {}".format(database_id, instance_id)) def create_table_using_ddl(database_name): - spanner_client = spanner.Client() - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database_name, - statements=[ - """CREATE TABLE Singers ( + spanner_client = spanner.Client() + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=database_name, + statements=[ + """CREATE TABLE Singers ( SingerId bigint NOT NULL, FirstName character varying(1024), LastName character varying(1024), SingerInfo bytea, PRIMARY KEY (SingerId) )""", - """CREATE TABLE Albums ( + """CREATE TABLE Albums ( SingerId bigint NOT NULL, AlbumId bigint NOT NULL, AlbumTitle character varying(1024), PRIMARY KEY (SingerId, AlbumId) ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", - ], - ) - operation = spanner_client.database_admin_api.update_database_ddl(request) - operation.result(OPERATION_TIMEOUT_SECONDS) + ], + ) + operation = spanner_client.database_admin_api.update_database_ddl(request) + operation.result(OPERATION_TIMEOUT_SECONDS) + # [END spanner_postgresql_create_database] # [START spanner_postgresql_insert_data] def insert_data(instance_id, database_id): - """Inserts sample data into the given database. 
- - The database and table must already exist and can be created using - `create_database`. - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.batch() as batch: - batch.insert( - table="Singers", - columns=("SingerId", "FirstName", "LastName"), - values=[ - (1, u"Marc", u"Richards"), - (2, u"Catalina", u"Smith"), - (3, u"Alice", u"Trentor"), - (4, u"Lea", u"Martin"), - (5, u"David", u"Lomond"), - ], - ) + """Inserts sample data into the given database. + + The database and table must already exist and can be created using + `create_database`. + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.batch() as batch: + batch.insert( + table="Singers", + columns=("SingerId", "FirstName", "LastName"), + values=[ + (1, "Marc", "Richards"), + (2, "Catalina", "Smith"), + (3, "Alice", "Trentor"), + (4, "Lea", "Martin"), + (5, "David", "Lomond"), + ], + ) - batch.insert( - table="Albums", - columns=("SingerId", "AlbumId", "AlbumTitle"), - values=[ - (1, 1, u"Total Junk"), - (1, 2, u"Go, Go, Go"), - (2, 1, u"Green"), - (2, 2, u"Forever Hold Your Peace"), - (2, 3, u"Terrified"), - ], - ) + batch.insert( + table="Albums", + columns=("SingerId", "AlbumId", "AlbumTitle"), + values=[ + (1, 1, "Total Junk"), + (1, 2, "Go, Go, Go"), + (2, 1, "Green"), + (2, 2, "Forever Hold Your Peace"), + (2, 3, "Terrified"), + ], + ) - print("Inserted data.") + print("Inserted data.") # [END spanner_postgresql_insert_data] @@ -158,32 +159,32 @@ def insert_data(instance_id, database_id): # [START spanner_postgresql_delete_data] def delete_data(instance_id, database_id): - """Deletes sample data from the given database. + """Deletes sample data from the given database. - The database, table, and data must already exist and can be created using - `create_database` and `insert_data`. 
- """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + The database, table, and data must already exist and can be created using + `create_database` and `insert_data`. + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - # Delete individual rows - albums_to_delete = spanner.KeySet(keys=[[2, 1], [2, 3]]) + # Delete individual rows + albums_to_delete = spanner.KeySet(keys=[[2, 1], [2, 3]]) - # Delete a range of rows where the column key is >=3 and <5 - singers_range = spanner.KeyRange(start_closed=[3], end_open=[5]) - singers_to_delete = spanner.KeySet(ranges=[singers_range]) + # Delete a range of rows where the column key is >=3 and <5 + singers_range = spanner.KeyRange(start_closed=[3], end_open=[5]) + singers_to_delete = spanner.KeySet(ranges=[singers_range]) - # Delete remaining Singers rows, which will also delete the remaining - # Albums rows because Albums was defined with ON DELETE CASCADE - remaining_singers = spanner.KeySet(all_=True) + # Delete remaining Singers rows, which will also delete the remaining + # Albums rows because Albums was defined with ON DELETE CASCADE + remaining_singers = spanner.KeySet(all_=True) - with database.batch() as batch: - batch.delete("Albums", albums_to_delete) - batch.delete("Singers", singers_to_delete) - batch.delete("Singers", remaining_singers) + with database.batch() as batch: + batch.delete("Albums", albums_to_delete) + batch.delete("Singers", singers_to_delete) + batch.delete("Singers", remaining_singers) - print("Deleted data.") + print("Deleted data.") # [END spanner_postgresql_delete_data] @@ -191,18 +192,18 @@ def delete_data(instance_id, database_id): # [START spanner_postgresql_query_data] def query_data(instance_id, database_id): - """Queries sample data from the database using SQL.""" - spanner_client = spanner.Client() - instance = 
spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" - ) + """Queries sample data from the database using SQL.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + ) - for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + for row in results: + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_postgresql_query_data] @@ -210,19 +211,19 @@ def query_data(instance_id, database_id): # [START spanner_postgresql_read_data] def read_data(instance_id, database_id): - """Reads sample data from the database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - keyset = spanner.KeySet(all_=True) - results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset - ) + """Reads sample data from the database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + ) - for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + for row in results: + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_postgresql_read_data] @@ -230,19 +231,19 @@ def read_data(instance_id, database_id): # [START spanner_postgresql_add_column] def add_column(instance_id, database_id): - """Adds a new 
column to the Albums table in the example database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + """Adds a new column to the Albums table in the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] - ) + operation = database.update_ddl( + ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] + ) - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print("Added the MarketingBudget column.") + print("Added the MarketingBudget column.") # [END spanner_postgresql_add_column] @@ -250,27 +251,27 @@ def add_column(instance_id, database_id): # [START spanner_postgresql_update_data] def update_data(instance_id, database_id): - """Updates sample data in the database. + """Updates sample data in the database. - This updates the `MarketingBudget` column which must be created before - running this sample. You can add the column by running the `add_column` - sample or by running this DDL statement against your database: + This updates the `MarketingBudget` column which must be created before + running this sample. 
You can add the column by running the `add_column` + sample or by running this DDL statement against your database: - ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - with database.batch() as batch: - batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, 100000), (2, 2, 500000)], - ) + with database.batch() as batch: + batch.update( + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, 100000), (2, 2, 500000)], + ) - print("Updated data.") + print("Updated data.") # [END spanner_postgresql_update_data] @@ -278,68 +279,68 @@ def update_data(instance_id, database_id): # [START spanner_postgresql_read_write_transaction] def read_write_transaction(instance_id, database_id): - """Performs a read-write transaction to update two sample records in the - database. - - This will transfer 200,000 from the `MarketingBudget` field for the second - Album to the first Album. If the `MarketingBudget` is too low, it will - raise an exception. - - Before running this sample, you will need to run the `update_data` sample - to populate the fields. - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def update_albums(transaction): - # Read the second album budget. 
- second_album_keyset = spanner.KeySet(keys=[(2, 2)]) - second_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=second_album_keyset, - limit=1, - ) - second_album_row = list(second_album_result)[0] - second_album_budget = second_album_row[0] - - transfer_amount = 200000 - - if second_album_budget < transfer_amount: - # Raising an exception will automatically roll back the - # transaction. - raise ValueError("The second album doesn't have enough funds to transfer") - - # Read the first album's budget. - first_album_keyset = spanner.KeySet(keys=[(1, 1)]) - first_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=first_album_keyset, - limit=1, - ) - first_album_row = list(first_album_result)[0] - first_album_budget = first_album_row[0] - - # Update the budgets. - second_album_budget -= transfer_amount - first_album_budget += transfer_amount - print( - "Setting first album's budget to {} and the second album's " - "budget to {}.".format(first_album_budget, second_album_budget) - ) + """Performs a read-write transaction to update two sample records in the + database. + + This will transfer 200,000 from the `MarketingBudget` field for the second + Album to the first Album. If the `MarketingBudget` is too low, it will + raise an exception. + + Before running this sample, you will need to run the `update_data` sample + to populate the fields. + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def update_albums(transaction): + # Read the second album budget. 
+ second_album_keyset = spanner.KeySet(keys=[(2, 2)]) + second_album_result = transaction.read( + table="Albums", + columns=("MarketingBudget",), + keyset=second_album_keyset, + limit=1, + ) + second_album_row = list(second_album_result)[0] + second_album_budget = second_album_row[0] + + transfer_amount = 200000 + + if second_album_budget < transfer_amount: + # Raising an exception will automatically roll back the + # transaction. + raise ValueError("The second album doesn't have enough funds to transfer") + + # Read the first album's budget. + first_album_keyset = spanner.KeySet(keys=[(1, 1)]) + first_album_result = transaction.read( + table="Albums", + columns=("MarketingBudget",), + keyset=first_album_keyset, + limit=1, + ) + first_album_row = list(first_album_result)[0] + first_album_budget = first_album_row[0] + + # Update the budgets. + second_album_budget -= transfer_amount + first_album_budget += transfer_amount + print( + "Setting first album's budget to {} and the second album's " + "budget to {}.".format(first_album_budget, second_album_budget) + ) - # Update the rows. - transaction.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], - ) + # Update the rows. + transaction.update( + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], + ) - database.run_in_transaction(update_albums) + database.run_in_transaction(update_albums) - print("Transaction complete.") + print("Transaction complete.") # [END spanner_postgresql_read_write_transaction] @@ -347,25 +348,25 @@ def update_albums(transaction): # [START spanner_postgresql_query_data_with_new_column] def query_data_with_new_column(instance_id, database_id): - """Queries sample data from the database using SQL. + """Queries sample data from the database using SQL. - This sample uses the `MarketingBudget` column. 
You can add the column - by running the `add_column` sample or by running this DDL statement against - your database: + This sample uses the `MarketingBudget` column. You can add the column + by running the `add_column` sample or by running this DDL statement against + your database: - ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" - ) + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" + ) - for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + for row in results: + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_postgresql_query_data_with_new_column] @@ -373,49 +374,49 @@ def query_data_with_new_column(instance_id, database_id): # [START spanner_postgresql_create_index] def add_index(instance_id, database_id): - """Adds a simple index to the example database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + """Adds a simple index to the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - operation = database.update_ddl( - ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] - ) + operation = database.update_ddl( + ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] + ) - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) 
+ print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print("Added the AlbumsByAlbumTitle index.") + print("Added the AlbumsByAlbumTitle index.") # [END spanner_postgresql_create_index] # [START spanner_postgresql_read_data_with_index] def read_data_with_index(instance_id, database_id): - """Reads sample data from the database using an index. - - The index must exist before running this sample. You can add the index - by running the `add_index` sample or by running this DDL statement against - your database: - - CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle) - - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - keyset = spanner.KeySet(all_=True) - results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle"), - keyset=keyset, - index="AlbumsByAlbumTitle", - ) + """Reads sample data from the database using an index. + + The index must exist before running this sample. 
You can add the index + by running the `add_index` sample or by running this DDL statement against + your database: + + CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle) + + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", + columns=("AlbumId", "AlbumTitle"), + keyset=keyset, + index="AlbumsByAlbumTitle", + ) - for row in results: - print("AlbumId: {}, AlbumTitle: {}".format(*row)) + for row in results: + print("AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_postgresql_read_data_with_index] @@ -423,22 +424,22 @@ def read_data_with_index(instance_id, database_id): # [START spanner_postgresql_create_storing_index] def add_storing_index(instance_id, database_id): - """Adds an storing index to the example database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - operation = database.update_ddl( - [ - "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" - "INCLUDE (MarketingBudget)" - ] - ) + """Adds an storing index to the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + operation = database.update_ddl( + [ + "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" + "INCLUDE (MarketingBudget)" + ] + ) - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print("Added the AlbumsByAlbumTitle2 index.") + print("Added the AlbumsByAlbumTitle2 index.") # [END spanner_postgresql_create_storing_index] @@ -446,32 +447,32 @@ def add_storing_index(instance_id, database_id): # [START spanner_postgresql_read_data_with_storing_index] 
def read_data_with_storing_index(instance_id, database_id): - """Reads sample data from the database using an index with a storing - clause. - - The index must exist before running this sample. You can add the index - by running the `add_scoring_index` sample or by running this DDL statement - against your database: - - CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) - INCLUDE (MarketingBudget) - - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - keyset = spanner.KeySet(all_=True) - results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle", "MarketingBudget"), - keyset=keyset, - index="AlbumsByAlbumTitle2", - ) + """Reads sample data from the database using an index with a storing + clause. + + The index must exist before running this sample. You can add the index + by running the `add_scoring_index` sample or by running this DDL statement + against your database: + + CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) + INCLUDE (MarketingBudget) + + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", + columns=("AlbumId", "AlbumTitle", "MarketingBudget"), + keyset=keyset, + index="AlbumsByAlbumTitle2", + ) - for row in results: - print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + for row in results: + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -479,171 +480,169 @@ def read_data_with_storing_index(instance_id, database_id): # [START spanner_postgresql_read_only_transaction] def read_only_transaction(instance_id, database_id): - """Reads data inside of a read-only transaction. 
- - Within the read-only transaction, or "snapshot", the application sees - consistent view of the database at a particular timestamp. - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot(multi_use=True) as snapshot: - # Read using SQL. - results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" - ) + """Reads data inside of a read-only transaction. + + Within the read-only transaction, or "snapshot", the application sees + consistent view of the database at a particular timestamp. + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot(multi_use=True) as snapshot: + # Read using SQL. + results = snapshot.execute_sql( + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + ) - print("Results from first read:") - for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) - - # Perform another read using the `read` method. Even if the data - # is updated in-between the reads, the snapshot ensures that both - # return the same data. - keyset = spanner.KeySet(all_=True) - results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset - ) + print("Results from first read:") + for row in results: + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) - print("Results from second read:") - for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + # Perform another read using the `read` method. Even if the data + # is updated in-between the reads, the snapshot ensures that both + # return the same data. 
+ keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + ) + + print("Results from second read:") + for row in results: + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_postgresql_read_only_transaction] def insert_with_dml(instance_id, database_id): - """Inserts data with a DML statement into the database. """ - # [START spanner_postgresql_dml_getting_started_insert] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def insert_singers(transaction): - row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " - "(12, 'Melissa', 'Garcia'), " - "(13, 'Russell', 'Morales'), " - "(14, 'Jacqueline', 'Long'), " - "(15, 'Dylan', 'Shaw')" - ) - print("{} record(s) inserted.".format(row_ct)) + """Inserts data with a DML statement into the database.""" + # [START spanner_postgresql_dml_getting_started_insert] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def insert_singers(transaction): + row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " + "(12, 'Melissa', 'Garcia'), " + "(13, 'Russell', 'Morales'), " + "(14, 'Jacqueline', 'Long'), " + "(15, 'Dylan', 'Shaw')" + ) + print("{} record(s) inserted.".format(row_ct)) - database.run_in_transaction(insert_singers) - # [END spanner_postgresql_dml_getting_started_insert] + database.run_in_transaction(insert_singers) + # [END spanner_postgresql_dml_getting_started_insert] def query_data_with_parameter(instance_id, database_id): - """Queries sample data from the database using SQL with a parameter.""" 
- # [START spanner_postgresql_query_with_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT SingerId, FirstName, LastName FROM Singers " - "WHERE LastName = $1", - params={"p1": "Garcia"}, - param_types={"p1": spanner.param_types.STRING}, - ) - - for row in results: - print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row)) - # [END spanner_postgresql_query_with_parameter] + """Queries sample data from the database using SQL with a parameter.""" + # [START spanner_postgresql_query_with_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", + params={"p1": "Garcia"}, + param_types={"p1": spanner.param_types.STRING}, + ) + for row in results: + print("SingerId: {}, FirstName: {}, LastName: {}".format(*row)) + # [END spanner_postgresql_query_with_parameter] def write_with_dml_transaction(instance_id, database_id): - """ Transfers part of a marketing budget from one album to another. """ - # [START spanner_postgresql_dml_getting_started_update] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def transfer_budget(transaction): - # Transfer marketing budget from one album to another. Performed in a - # single transaction to ensure that the transfer is atomic. 
- second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" - ) - second_album_row = list(second_album_result)[0] - second_album_budget = second_album_row[0] - - transfer_amount = 200000 - - # Transaction will only be committed if this condition still holds at - # the time of commit. Otherwise it will be aborted and the callable - # will be rerun by the client library - if second_album_budget >= transfer_amount: - first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" - ) - first_album_row = list(first_album_result)[0] - first_album_budget = first_album_row[0] - - second_album_budget -= transfer_amount - first_album_budget += transfer_amount - - # Update first album - transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 1 and AlbumId = 1", - params={"p1": first_album_budget}, - param_types={"p1": spanner.param_types.INT64}, - ) - - # Update second album - transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 2 and AlbumId = 2", - params={"p1": second_album_budget}, - param_types={"p1": spanner.param_types.INT64}, - ) - - print( - "Transferred {} from Album2's budget to Album1's".format( - transfer_amount + """Transfers part of a marketing budget from one album to another.""" + # [START spanner_postgresql_dml_getting_started_update] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def transfer_budget(transaction): + # Transfer marketing budget from one album to another. Performed in a + # single transaction to ensure that the transfer is atomic. 
+ second_album_result = transaction.execute_sql( + "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" ) - ) - - database.run_in_transaction(transfer_budget) - # [END spanner_postgresql_dml_getting_started_update] + second_album_row = list(second_album_result)[0] + second_album_budget = second_album_row[0] + + transfer_amount = 200000 + + # Transaction will only be committed if this condition still holds at + # the time of commit. Otherwise it will be aborted and the callable + # will be rerun by the client library + if second_album_budget >= transfer_amount: + first_album_result = transaction.execute_sql( + "SELECT MarketingBudget from Albums " + "WHERE SingerId = 1 and AlbumId = 1" + ) + first_album_row = list(first_album_result)[0] + first_album_budget = first_album_row[0] + + second_album_budget -= transfer_amount + first_album_budget += transfer_amount + + # Update first album + transaction.execute_update( + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 1 and AlbumId = 1", + params={"p1": first_album_budget}, + param_types={"p1": spanner.param_types.INT64}, + ) + + # Update second album + transaction.execute_update( + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 2 and AlbumId = 2", + params={"p1": second_album_budget}, + param_types={"p1": spanner.param_types.INT64}, + ) + + print( + "Transferred {} from Album2's budget to Album1's".format( + transfer_amount + ) + ) + + database.run_in_transaction(transfer_budget) + # [END spanner_postgresql_dml_getting_started_update] # [START spanner_postgresql_read_stale_data] def read_stale_data(instance_id, database_id): - """Reads sample data from the database. 
The data is exactly 15 seconds - stale.""" - import datetime - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - staleness = datetime.timedelta(seconds=15) - - with database.snapshot(exact_staleness=staleness) as snapshot: - keyset = spanner.KeySet(all_=True) - results = snapshot.read( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - keyset=keyset, - ) + """Reads sample data from the database. The data is exactly 15 seconds + stale.""" + import datetime + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + staleness = datetime.timedelta(seconds=15) + + with database.snapshot(exact_staleness=staleness) as snapshot: + keyset = spanner.KeySet(all_=True) + results = snapshot.read( + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + keyset=keyset, + ) - for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + for row in results: + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_postgresql_read_stale_data] @@ -651,36 +650,36 @@ def read_stale_data(instance_id, database_id): # [START spanner_postgresql_update_data_with_timestamp_column] def update_data_with_timestamp(instance_id, database_id): - """Updates Performances tables in the database with the COMMIT_TIMESTAMP - column. + """Updates Performances tables in the database with the COMMIT_TIMESTAMP + column. - This updates the `MarketingBudget` column which must be created before - running this sample. You can add the column by running the `add_column` - sample or by running this DDL statement against your database: + This updates the `MarketingBudget` column which must be created before + running this sample. 
You can add the column by running the `add_column` + sample or by running this DDL statement against your database: - ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT + ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT - In addition this update expects the LastUpdateTime column added by - applying this DDL statement against your database: + In addition this update expects the LastUpdateTime column added by + applying this DDL statement against your database: - ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) + ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + database = instance.database(database_id) - with database.batch() as batch: - batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), - values=[ - (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), - (2, 2, 750000, spanner.COMMIT_TIMESTAMP), - ], - ) + with database.batch() as batch: + batch.update( + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), + values=[ + (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), + (2, 2, 750000, spanner.COMMIT_TIMESTAMP), + ], + ) - print("Updated data.") + print("Updated data.") # [END spanner_postgresql_update_data_with_timestamp_column] @@ -688,28 +687,24 @@ def update_data_with_timestamp(instance_id, database_id): # [START spanner_postgresql_add_timestamp_column] def add_timestamp_column(instance_id, database_id): - """ Adds a new TIMESTAMP column to the Albums table in the example database. 
- """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - - database = instance.database(database_id) - - operation = database.update_ddl( - [ - "ALTER TABLE Albums ADD COLUMN LastUpdateTime " - "SPANNER.COMMIT_TIMESTAMP" - ] - ) - - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) - - print( - 'Altered table "Albums" on database {} on instance {}.'.format( - database_id, instance_id + """Adds a new TIMESTAMP column to the Albums table in the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + operation = database.update_ddl( + ["ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] + ) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + 'Altered table "Albums" on database {} on instance {}.'.format( + database_id, instance_id + ) ) - ) # [END spanner_postgresql_add_timestamp_column] @@ -717,30 +712,30 @@ def add_timestamp_column(instance_id, database_id): # [START spanner_postgresql_query_data_with_timestamp_column] def query_data_with_timestamp(instance_id, database_id): - """Queries sample data from the database using SQL. + """Queries sample data from the database using SQL. - This updates the `LastUpdateTime` column which must be created before - running this sample. You can add the column by running the - `add_timestamp_column` sample or by running this DDL statement - against your database: + This updates the `LastUpdateTime` column which must be created before + running this sample. 
You can add the column by running the + `add_timestamp_column` sample or by running this DDL statement + against your database: - ALTER TABLE Performances ADD COLUMN LastUpdateTime TIMESTAMP - OPTIONS (allow_commit_timestamp=true) + ALTER TABLE Performances ADD COLUMN LastUpdateTime TIMESTAMP + OPTIONS (allow_commit_timestamp=true) - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + database = instance.database(database_id) - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " - "ORDER BY LastUpdateTime DESC" - ) + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " + "ORDER BY LastUpdateTime DESC" + ) - for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + for row in results: + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_postgresql_query_data_with_timestamp_column] @@ -748,17 +743,16 @@ def query_data_with_timestamp(instance_id, database_id): # [START spanner_postgresql_create_table_with_timestamp_column] def create_table_with_timestamp(instance_id, database_id): - """Creates a table with a COMMIT_TIMESTAMP column.""" + """Creates a table with a COMMIT_TIMESTAMP column.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements= - [ - """CREATE TABLE Performances ( + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=database.name, + 
statements=[ + """CREATE TABLE Performances ( SingerId BIGINT NOT NULL, VenueId BIGINT NOT NULL, EventDate Date, @@ -766,18 +760,18 @@ def create_table_with_timestamp(instance_id, database_id): LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (SingerId, VenueId, EventDate)) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""" - ], - ) - operation = spanner_client.database_admin_api.update_database_ddl(request) + ], + ) + operation = spanner_client.database_admin_api.update_database_ddl(request) - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print( - "Created Performances table on database {} on instance {}".format( - database_id, instance_id + print( + "Created Performances table on database {} on instance {}".format( + database_id, instance_id + ) ) - ) # [END spanner_postgresql_create_table_with_timestamp_column] @@ -785,212 +779,210 @@ def create_table_with_timestamp(instance_id, database_id): # [START spanner_postgresql_insert_data_with_timestamp_column] def insert_data_with_timestamp(instance_id, database_id): - """Inserts data with a COMMIT_TIMESTAMP field into a table. 
""" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - - database = instance.database(database_id) - - with database.batch() as batch: - batch.insert( - table="Performances", - columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), - values=[ - (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), - (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), - (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), - ], - ) + """Inserts data with a COMMIT_TIMESTAMP field into a table.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + with database.batch() as batch: + batch.insert( + table="Performances", + columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), + values=[ + (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), + (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), + (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), + ], + ) - print("Inserted data.") + print("Inserted data.") # [END spanner_postgresql_insert_data_with_timestamp_column] def insert_data_with_dml(instance_id, database_id): - """Inserts sample data into the given database using a DML statement. 
""" - # [START spanner_postgresql_dml_standard_insert] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def insert_singers(transaction): - row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (10, 'Virginia', 'Watson')" - ) - - print("{} record(s) inserted.".format(row_ct)) + """Inserts sample data into the given database using a DML statement.""" + # [START spanner_postgresql_dml_standard_insert] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def insert_singers(transaction): + row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (10, 'Virginia', 'Watson')" + ) - database.run_in_transaction(insert_singers) - # [END spanner_postgresql_dml_standard_insert] + print("{} record(s) inserted.".format(row_ct)) + database.run_in_transaction(insert_singers) + # [END spanner_postgresql_dml_standard_insert] def update_data_with_dml(instance_id, database_id): - """Updates sample data from the database using a DML statement. 
""" - # [START spanner_postgresql_dml_standard_update] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def update_albums(transaction): - row_ct = transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 1" - ) + """Updates sample data from the database using a DML statement.""" + # [START spanner_postgresql_dml_standard_update] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def update_albums(transaction): + row_ct = transaction.execute_update( + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 1" + ) - print("{} record(s) updated.".format(row_ct)) + print("{} record(s) updated.".format(row_ct)) - database.run_in_transaction(update_albums) - # [END spanner_postgresql_dml_standard_update] + database.run_in_transaction(update_albums) + # [END spanner_postgresql_dml_standard_update] def delete_data_with_dml(instance_id, database_id): - """Deletes sample data from the database using a DML statement. 
""" - # [START spanner_postgresql_dml_standard_delete] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def delete_singers(transaction): - row_ct = transaction.execute_update( - "DELETE FROM Singers WHERE FirstName = 'Alice'" - ) + """Deletes sample data from the database using a DML statement.""" + # [START spanner_postgresql_dml_standard_delete] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def delete_singers(transaction): + row_ct = transaction.execute_update( + "DELETE FROM Singers WHERE FirstName = 'Alice'" + ) - print("{} record(s) deleted.".format(row_ct)) + print("{} record(s) deleted.".format(row_ct)) - database.run_in_transaction(delete_singers) - # [END spanner_postgresql_dml_standard_delete] + database.run_in_transaction(delete_singers) + # [END spanner_postgresql_dml_standard_delete] def dml_write_read_transaction(instance_id, database_id): - """First inserts data then reads it from within a transaction using DML.""" - # [START spanner_postgresql_dml_write_then_read] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - def write_then_read(transaction): - # Insert record. 
- row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (11, 'Timothy', 'Campbell')" - ) - print("{} record(s) inserted.".format(row_ct)) + """First inserts data then reads it from within a transaction using DML.""" + # [START spanner_postgresql_dml_write_then_read] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def write_then_read(transaction): + # Insert record. + row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (11, 'Timothy', 'Campbell')" + ) + print("{} record(s) inserted.".format(row_ct)) - # Read newly inserted record. - results = transaction.execute_sql( - "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" - ) - for result in results: - print("FirstName: {}, LastName: {}".format(*result)) + # Read newly inserted record. + results = transaction.execute_sql( + "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" + ) + for result in results: + print("FirstName: {}, LastName: {}".format(*result)) - database.run_in_transaction(write_then_read) - # [END spanner_postgresql_dml_write_then_read] + database.run_in_transaction(write_then_read) + # [END spanner_postgresql_dml_write_then_read] def update_data_with_partitioned_dml(instance_id, database_id): - """ Update sample data with a partitioned DML statement. 
""" - # [START spanner_postgresql_dml_partitioned_update] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" + """Update sample data with a partitioned DML statement.""" + # [START spanner_postgresql_dml_partitioned_update] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - row_ct = database.execute_partitioned_dml( - "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" - ) + row_ct = database.execute_partitioned_dml( + "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" + ) - print("{} records updated.".format(row_ct)) - # [END spanner_postgresql_dml_partitioned_update] + print("{} records updated.".format(row_ct)) + # [END spanner_postgresql_dml_partitioned_update] def delete_data_with_partitioned_dml(instance_id, database_id): - """ Delete sample data with a partitioned DML statement. 
""" - # [START spanner_postgresql_dml_partitioned_delete] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + """Delete sample data with a partitioned DML statement.""" + # [START spanner_postgresql_dml_partitioned_delete] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") + row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") - print("{} record(s) deleted.".format(row_ct)) - # [END spanner_postgresql_dml_partitioned_delete] + print("{} record(s) deleted.".format(row_ct)) + # [END spanner_postgresql_dml_partitioned_delete] def update_with_batch_dml(instance_id, database_id): - """Updates sample data in the database using Batch DML. 
""" - # [START spanner_postgresql_dml_batch_update] - from google.rpc.code_pb2 import OK + """Updates sample data in the database using Batch DML.""" + # [START spanner_postgresql_dml_batch_update] + from google.rpc.code_pb2 import OK - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - insert_statement = ( - "INSERT INTO Albums " - "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " - "VALUES (1, 3, 'Test Album Title', 10000)" - ) + insert_statement = ( + "INSERT INTO Albums " + "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " + "VALUES (1, 3, 'Test Album Title', 10000)" + ) - update_statement = ( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 3" - ) + update_statement = ( + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 3" + ) - def update_albums(transaction): - status, row_cts = transaction.batch_update([insert_statement, update_statement]) + def update_albums(transaction): + status, row_cts = transaction.batch_update([insert_statement, update_statement]) - if status.code != OK: - # Do handling here. - # Note: the exception will still be raised when - # `commit` is called by `run_in_transaction`. - return + if status.code != OK: + # Do handling here. + # Note: the exception will still be raised when + # `commit` is called by `run_in_transaction`. 
+ return - print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) + print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) - database.run_in_transaction(update_albums) - # [END spanner_postgresql_dml_batch_update] + database.run_in_transaction(update_albums) + # [END spanner_postgresql_dml_batch_update] def create_table_with_datatypes(instance_id, database_id): - """Creates a table with supported datatypes. """ - # [START spanner_postgresql_create_table_with_datatypes] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements= - [ - """CREATE TABLE Venues ( + """Creates a table with supported datatypes.""" + # [START spanner_postgresql_create_table_with_datatypes] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=database.name, + statements=[ + """CREATE TABLE Venues ( VenueId BIGINT NOT NULL, VenueName character varying(100), VenueInfo BYTEA, @@ -1000,488 +992,487 @@ def create_table_with_datatypes(instance_id, database_id): Revenue NUMERIC, LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (VenueId))""" - ], - ) - operation = spanner_client.database_admin_api.update_database_ddl(request) + ], + ) + operation = spanner_client.database_admin_api.update_database_ddl(request) - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) - print( - "Created Venues table on database {} on instance {}".format( 
- database_id, instance_id + print( + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) ) - ) - # [END spanner_postgresql_create_table_with_datatypes] + # [END spanner_postgresql_create_table_with_datatypes] def insert_datatypes_data(instance_id, database_id): - """Inserts data with supported datatypes into a table. """ - # [START spanner_postgresql_insert_datatypes_data] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleBytes1 = base64.b64encode(u"Hello World 1".encode()) - exampleBytes2 = base64.b64encode(u"Hello World 2".encode()) - exampleBytes3 = base64.b64encode(u"Hello World 3".encode()) - with database.batch() as batch: - batch.insert( - table="Venues", - columns=( - "VenueId", - "VenueName", - "VenueInfo", - "Capacity", - "OutdoorVenue", - "PopularityScore", - "Revenue", - "LastUpdateTime", - ), - values=[ - ( - 4, - "Venue 4", - exampleBytes1, - 1800, - False, - 0.85543, - decimal.Decimal("215100.10"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 19, - "Venue 19", - exampleBytes2, - 6300, - True, - 0.98716, - decimal.Decimal("1200100.00"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 42, - "Venue 42", - exampleBytes3, - 3000, - False, - 0.72598, - decimal.Decimal("390650.99"), - spanner.COMMIT_TIMESTAMP, - ), - ], - ) + """Inserts data with supported datatypes into a table.""" + # [START spanner_postgresql_insert_datatypes_data] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBytes1 = base64.b64encode("Hello World 1".encode()) + exampleBytes2 = base64.b64encode("Hello World 2".encode()) + exampleBytes3 = base64.b64encode("Hello World 3".encode()) + with database.batch() as batch: + 
batch.insert( + table="Venues", + columns=( + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", + "OutdoorVenue", + "PopularityScore", + "Revenue", + "LastUpdateTime", + ), + values=[ + ( + 4, + "Venue 4", + exampleBytes1, + 1800, + False, + 0.85543, + decimal.Decimal("215100.10"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 19, + "Venue 19", + exampleBytes2, + 6300, + True, + 0.98716, + decimal.Decimal("1200100.00"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + "Venue 42", + exampleBytes3, + 3000, + False, + 0.72598, + decimal.Decimal("390650.99"), + spanner.COMMIT_TIMESTAMP, + ), + ], + ) - print("Inserted data.") - # [END spanner_postgresql_insert_datatypes_data] + print("Inserted data.") + # [END spanner_postgresql_insert_datatypes_data] def query_data_with_bool(instance_id, database_id): - """Queries sample data using SQL with a BOOL parameter. """ - # [START spanner_postgresql_query_with_bool_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleBool = True - param = {"p1": exampleBool} - param_type = {"p1": param_types.BOOL} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " - "WHERE OutdoorVenue = $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a BOOL parameter.""" + # [START spanner_postgresql_query_with_bool_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBool = True + param = {"p1": exampleBool} + param_type = {"p1": param_types.BOOL} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE 
OutdoorVenue = $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) - # [END spanner_postgresql_query_with_bool_parameter] + for row in results: + print("VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) + # [END spanner_postgresql_query_with_bool_parameter] def query_data_with_bytes(instance_id, database_id): - """Queries sample data using SQL with a BYTES parameter. """ - # [START spanner_postgresql_query_with_bytes_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleBytes = base64.b64encode(u"Hello World 1".encode()) - param = {"p1": exampleBytes} - param_type = {"p1": param_types.BYTES} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a BYTES parameter.""" + # [START spanner_postgresql_query_with_bytes_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleBytes = base64.b64encode("Hello World 1".encode()) + param = {"p1": exampleBytes} + param_type = {"p1": param_types.BYTES} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}".format(*row)) - # [END spanner_postgresql_query_with_bytes_parameter] + for row in results: + print("VenueId: {}, VenueName: {}".format(*row)) + # [END spanner_postgresql_query_with_bytes_parameter] def 
query_data_with_float(instance_id, database_id): - """Queries sample data using SQL with a FLOAT8 parameter. """ - # [START spanner_postgresql_query_with_float_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleFloat = 0.8 - param = {"p1": exampleFloat} - param_type = {"p1": param_types.FLOAT64} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, PopularityScore FROM Venues " - "WHERE PopularityScore > $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a FLOAT8 parameter.""" + # [START spanner_postgresql_query_with_float_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleFloat = 0.8 + param = {"p1": exampleFloat} + param_type = {"p1": param_types.FLOAT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) - # [END spanner_postgresql_query_with_float_parameter] + for row in results: + print("VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + # [END spanner_postgresql_query_with_float_parameter] def query_data_with_int(instance_id, database_id): - """Queries sample data using SQL with a BIGINT parameter. 
""" - # [START spanner_postgresql_query_with_int_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleInt = 3000 - param = {"p1": exampleInt} - param_type = {"p1": param_types.INT64} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, Capacity FROM Venues " - "WHERE Capacity >= $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a BIGINT parameter.""" + # [START spanner_postgresql_query_with_int_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleInt = 3000 + param = {"p1": exampleInt} + param_type = {"p1": param_types.INT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) - # [END spanner_postgresql_query_with_int_parameter] + for row in results: + print("VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) + # [END spanner_postgresql_query_with_int_parameter] def query_data_with_string(instance_id, database_id): - """Queries sample data using SQL with a STRING parameter. 
""" - # [START spanner_postgresql_query_with_string_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - exampleString = "Venue 42" - param = {"p1": exampleString} - param_type = {"p1": param_types.STRING} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a STRING parameter.""" + # [START spanner_postgresql_query_with_string_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + exampleString = "Venue 42" + param = {"p1": exampleString} + param_type = {"p1": param_types.STRING} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}".format(*row)) - # [END spanner_postgresql_query_with_string_parameter] + for row in results: + print("VenueId: {}, VenueName: {}".format(*row)) + # [END spanner_postgresql_query_with_string_parameter] def query_data_with_timestamp_parameter(instance_id, database_id): - """Queries sample data using SQL with a TIMESTAMPTZ parameter. 
""" - # [START spanner_postgresql_query_with_timestamp_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - example_timestamp = datetime.datetime.utcnow().isoformat() + "Z" - # [END spanner_postgresql_query_with_timestamp_parameter] - # Avoid time drift on the local machine. - # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. - example_timestamp = ( - datetime.datetime.utcnow() + datetime.timedelta(days=1) - ).isoformat() + "Z" - # [START spanner_postgresql_query_with_timestamp_parameter] - param = {"p1": example_timestamp} - param_type = {"p1": param_types.TIMESTAMP} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " - "WHERE LastUpdateTime < $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a TIMESTAMPTZ parameter.""" + # [START spanner_postgresql_query_with_timestamp_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + example_timestamp = datetime.datetime.utcnow().isoformat() + "Z" + # [END spanner_postgresql_query_with_timestamp_parameter] + # Avoid time drift on the local machine. + # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. 
+ example_timestamp = ( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + "Z" + # [START spanner_postgresql_query_with_timestamp_parameter] + param = {"p1": example_timestamp} + param_type = {"p1": param_types.TIMESTAMP} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) - # [END spanner_postgresql_query_with_timestamp_parameter] + for row in results: + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_query_with_timestamp_parameter] def query_data_with_numeric_parameter(instance_id, database_id): - """Queries sample data using SQL with a NUMERIC parameter. """ - # [START spanner_postgresql_query_with_numeric_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - example_numeric = decimal.Decimal("300000") - param = {"p1": example_numeric} - param_type = {"p1": param_types.NUMERIC} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", - params=param, - param_types=param_type, - ) + """Queries sample data using SQL with a NUMERIC parameter.""" + # [START spanner_postgresql_query_with_numeric_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + example_numeric = decimal.Decimal("300000") + param = {"p1": example_numeric} + param_type = {"p1": param_types.NUMERIC} + + with database.snapshot() as snapshot: + results = 
snapshot.execute_sql( + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", + params=param, + param_types=param_type, + ) - for row in results: - print(u"VenueId: {}, Revenue: {}".format(*row)) - # [END spanner_postgresql_query_with_numeric_parameter] + for row in results: + print("VenueId: {}, Revenue: {}".format(*row)) + # [END spanner_postgresql_query_with_numeric_parameter] def create_client_with_query_options(instance_id, database_id): - """Create a client with query options.""" - # [START spanner_postgresql_create_client_with_query_options] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client( - query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - } - ) - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + """Create a client with query options.""" + # [START spanner_postgresql_create_client_with_query_options] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client( + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + } ) + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) - for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) - # [END spanner_postgresql_create_client_with_query_options] + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + ) + + for row in results: + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_create_client_with_query_options] def query_data_with_query_options(instance_id, database_id): - """Queries sample data using SQL with query options.""" - # [START 
spanner_postgresql_query_with_query_options] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", - query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - }, - ) + """Queries sample data using SQL with query options.""" + # [START spanner_postgresql_query_with_query_options] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + }, + ) - for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) - # [END spanner_postgresql_query_with_query_options] + for row in results: + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + # [END spanner_postgresql_query_with_query_options] if __name__ == "__main__": # noqa: C901 - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter - ) - parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") - parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", default="example_db" - ) - - subparsers = parser.add_subparsers(dest="command") - subparsers.add_parser("create_instance", help=create_instance.__doc__) - subparsers.add_parser("create_database", help=create_database.__doc__) - subparsers.add_parser("insert_data", help=insert_data.__doc__) - subparsers.add_parser("delete_data", 
help=delete_data.__doc__) - subparsers.add_parser("query_data", help=query_data.__doc__) - subparsers.add_parser("read_data", help=read_data.__doc__) - subparsers.add_parser("read_stale_data", help=read_stale_data.__doc__) - subparsers.add_parser("add_column", help=add_column.__doc__) - subparsers.add_parser("update_data", help=update_data.__doc__) - subparsers.add_parser( - "query_data_with_new_column", help=query_data_with_new_column.__doc__ - ) - subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) - subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) - subparsers.add_parser("add_index", help=add_index.__doc__) - subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) - subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) - subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) - subparsers.add_parser( - "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ - ) - subparsers.add_parser( - "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ - ) - subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) - subparsers.add_parser( - "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ - ) - subparsers.add_parser( - "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ - ) - subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) - subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) - subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) - subparsers.add_parser( - "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ - ) - subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) - subparsers.add_parser( - "query_data_with_parameter", help=query_data_with_parameter.__doc__ - ) - subparsers.add_parser( - "write_with_dml_transaction", 
help=write_with_dml_transaction.__doc__ - ) - subparsers.add_parser( - "update_data_with_partitioned_dml", - help=update_data_with_partitioned_dml.__doc__, - ) - subparsers.add_parser( - "delete_data_with_partitioned_dml", - help=delete_data_with_partitioned_dml.__doc__, - ) - subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) - subparsers.add_parser( - "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ - ) - subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) - subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) - subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) - subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) - subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) - subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) - subparsers.add_parser( - "query_data_with_timestamp_parameter", - help=query_data_with_timestamp_parameter.__doc__, - ) - subparsers.add_parser( - "query_data_with_numeric_parameter", - help=query_data_with_numeric_parameter.__doc__, - ) - subparsers.add_parser( - "query_data_with_query_options", help=query_data_with_query_options.__doc__ - ) - subparsers.add_parser( - "create_client_with_query_options", - help=create_client_with_query_options.__doc__, - ) - - args = parser.parse_args() - - if args.command == "create_instance": - create_instance(args.instance_id) - elif args.command == "create_database": - create_database(args.instance_id, args.database_id) - elif args.command == "insert_data": - insert_data(args.instance_id, args.database_id) - elif args.command == "delete_data": - delete_data(args.instance_id, args.database_id) - elif args.command == "query_data": - query_data(args.instance_id, args.database_id) - elif args.command == "read_data": - read_data(args.instance_id, args.database_id) - elif 
args.command == "read_stale_data": - read_stale_data(args.instance_id, args.database_id) - elif args.command == "add_column": - add_column(args.instance_id, args.database_id) - elif args.command == "update_data": - update_data(args.instance_id, args.database_id) - elif args.command == "query_data_with_new_column": - query_data_with_new_column(args.instance_id, args.database_id) - elif args.command == "read_write_transaction": - read_write_transaction(args.instance_id, args.database_id) - elif args.command == "read_only_transaction": - read_only_transaction(args.instance_id, args.database_id) - elif args.command == "add_index": - add_index(args.instance_id, args.database_id) - elif args.command == "read_data_with_index": - read_data_with_index(args.instance_id, args.database_id) - elif args.command == "add_storing_index": - add_storing_index(args.instance_id, args.database_id) - elif args.command == "read_data_with_storing_index": - read_data_with_storing_index(args.instance_id, args.database_id) - elif args.command == "create_table_with_timestamp": - create_table_with_timestamp(args.instance_id, args.database_id) - elif args.command == "insert_data_with_timestamp": - insert_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == "add_timestamp_column": - add_timestamp_column(args.instance_id, args.database_id) - elif args.command == "update_data_with_timestamp": - update_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == "query_data_with_timestamp": - query_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == "insert_data_with_dml": - insert_data_with_dml(args.instance_id, args.database_id) - elif args.command == "update_data_with_dml": - update_data_with_dml(args.instance_id, args.database_id) - elif args.command == "delete_data_with_dml": - delete_data_with_dml(args.instance_id, args.database_id) - elif args.command == "dml_write_read_transaction": - 
dml_write_read_transaction(args.instance_id, args.database_id) - elif args.command == "insert_with_dml": - insert_with_dml(args.instance_id, args.database_id) - elif args.command == "query_data_with_parameter": - query_data_with_parameter(args.instance_id, args.database_id) - elif args.command == "write_with_dml_transaction": - write_with_dml_transaction(args.instance_id, args.database_id) - elif args.command == "update_data_with_partitioned_dml": - update_data_with_partitioned_dml(args.instance_id, args.database_id) - elif args.command == "delete_data_with_partitioned_dml": - delete_data_with_partitioned_dml(args.instance_id, args.database_id) - elif args.command == "update_with_batch_dml": - update_with_batch_dml(args.instance_id, args.database_id) - elif args.command == "create_table_with_datatypes": - create_table_with_datatypes(args.instance_id, args.database_id) - elif args.command == "insert_datatypes_data": - insert_datatypes_data(args.instance_id, args.database_id) - elif args.command == "query_data_with_bool": - query_data_with_bool(args.instance_id, args.database_id) - elif args.command == "query_data_with_bytes": - query_data_with_bytes(args.instance_id, args.database_id) - elif args.command == "query_data_with_date": - query_data_with_date(args.instance_id, args.database_id) - elif args.command == "query_data_with_float": - query_data_with_float(args.instance_id, args.database_id) - elif args.command == "query_data_with_int": - query_data_with_int(args.instance_id, args.database_id) - elif args.command == "query_data_with_string": - query_data_with_string(args.instance_id, args.database_id) - elif args.command == "query_data_with_timestamp_parameter": - query_data_with_timestamp_parameter(args.instance_id, args.database_id) - elif args.command == "query_data_with_query_options": - query_data_with_query_options(args.instance_id, args.database_id) - elif args.command == "create_client_with_query_options": - 
create_client_with_query_options(args.instance_id, args.database_id) + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") + parser.add_argument( + "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + ) + + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("create_instance", help=create_instance.__doc__) + subparsers.add_parser("create_database", help=create_database.__doc__) + subparsers.add_parser("insert_data", help=insert_data.__doc__) + subparsers.add_parser("delete_data", help=delete_data.__doc__) + subparsers.add_parser("query_data", help=query_data.__doc__) + subparsers.add_parser("read_data", help=read_data.__doc__) + subparsers.add_parser("read_stale_data", help=read_stale_data.__doc__) + subparsers.add_parser("add_column", help=add_column.__doc__) + subparsers.add_parser("update_data", help=update_data.__doc__) + subparsers.add_parser( + "query_data_with_new_column", help=query_data_with_new_column.__doc__ + ) + subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) + subparsers.add_parser("add_index", help=add_index.__doc__) + subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) + subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) + subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) + subparsers.add_parser( + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + ) + subparsers.add_parser( + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + ) + subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) + subparsers.add_parser( + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + ) + 
subparsers.add_parser( + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + ) + subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) + subparsers.add_parser( + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + ) + subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) + subparsers.add_parser( + "query_data_with_parameter", help=query_data_with_parameter.__doc__ + ) + subparsers.add_parser( + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + ) + subparsers.add_parser( + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, + ) + subparsers.add_parser( + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, + ) + subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) + subparsers.add_parser( + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + ) + subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) + subparsers.add_parser( + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, + ) + subparsers.add_parser( + "query_data_with_numeric_parameter", + help=query_data_with_numeric_parameter.__doc__, + ) + subparsers.add_parser( + "query_data_with_query_options", help=query_data_with_query_options.__doc__ + ) + 
subparsers.add_parser( + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, + ) + + args = parser.parse_args() + + if args.command == "create_instance": + create_instance(args.instance_id) + elif args.command == "create_database": + create_database(args.instance_id, args.database_id) + elif args.command == "insert_data": + insert_data(args.instance_id, args.database_id) + elif args.command == "delete_data": + delete_data(args.instance_id, args.database_id) + elif args.command == "query_data": + query_data(args.instance_id, args.database_id) + elif args.command == "read_data": + read_data(args.instance_id, args.database_id) + elif args.command == "read_stale_data": + read_stale_data(args.instance_id, args.database_id) + elif args.command == "add_column": + add_column(args.instance_id, args.database_id) + elif args.command == "update_data": + update_data(args.instance_id, args.database_id) + elif args.command == "query_data_with_new_column": + query_data_with_new_column(args.instance_id, args.database_id) + elif args.command == "read_write_transaction": + read_write_transaction(args.instance_id, args.database_id) + elif args.command == "read_only_transaction": + read_only_transaction(args.instance_id, args.database_id) + elif args.command == "add_index": + add_index(args.instance_id, args.database_id) + elif args.command == "read_data_with_index": + read_data_with_index(args.instance_id, args.database_id) + elif args.command == "add_storing_index": + add_storing_index(args.instance_id, args.database_id) + elif args.command == "read_data_with_storing_index": + read_data_with_storing_index(args.instance_id, args.database_id) + elif args.command == "create_table_with_timestamp": + create_table_with_timestamp(args.instance_id, args.database_id) + elif args.command == "insert_data_with_timestamp": + insert_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "add_timestamp_column": + 
add_timestamp_column(args.instance_id, args.database_id) + elif args.command == "update_data_with_timestamp": + update_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "query_data_with_timestamp": + query_data_with_timestamp(args.instance_id, args.database_id) + elif args.command == "insert_data_with_dml": + insert_data_with_dml(args.instance_id, args.database_id) + elif args.command == "update_data_with_dml": + update_data_with_dml(args.instance_id, args.database_id) + elif args.command == "delete_data_with_dml": + delete_data_with_dml(args.instance_id, args.database_id) + elif args.command == "dml_write_read_transaction": + dml_write_read_transaction(args.instance_id, args.database_id) + elif args.command == "insert_with_dml": + insert_with_dml(args.instance_id, args.database_id) + elif args.command == "query_data_with_parameter": + query_data_with_parameter(args.instance_id, args.database_id) + elif args.command == "write_with_dml_transaction": + write_with_dml_transaction(args.instance_id, args.database_id) + elif args.command == "update_data_with_partitioned_dml": + update_data_with_partitioned_dml(args.instance_id, args.database_id) + elif args.command == "delete_data_with_partitioned_dml": + delete_data_with_partitioned_dml(args.instance_id, args.database_id) + elif args.command == "update_with_batch_dml": + update_with_batch_dml(args.instance_id, args.database_id) + elif args.command == "create_table_with_datatypes": + create_table_with_datatypes(args.instance_id, args.database_id) + elif args.command == "insert_datatypes_data": + insert_datatypes_data(args.instance_id, args.database_id) + elif args.command == "query_data_with_bool": + query_data_with_bool(args.instance_id, args.database_id) + elif args.command == "query_data_with_bytes": + query_data_with_bytes(args.instance_id, args.database_id) + elif args.command == "query_data_with_date": + query_data_with_date(args.instance_id, args.database_id) + elif args.command == 
"query_data_with_float": + query_data_with_float(args.instance_id, args.database_id) + elif args.command == "query_data_with_int": + query_data_with_int(args.instance_id, args.database_id) + elif args.command == "query_data_with_string": + query_data_with_string(args.instance_id, args.database_id) + elif args.command == "query_data_with_timestamp_parameter": + query_data_with_timestamp_parameter(args.instance_id, args.database_id) + elif args.command == "query_data_with_query_options": + query_data_with_query_options(args.instance_id, args.database_id) + elif args.command == "create_client_with_query_options": + create_client_with_query_options(args.instance_id, args.database_id) diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 3d65ab9c7b..39be5bb013 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -351,11 +351,11 @@ def insert_data(instance_id, database_id): table="Singers", columns=("SingerId", "FirstName", "LastName"), values=[ - (1, u"Marc", u"Richards"), - (2, u"Catalina", u"Smith"), - (3, u"Alice", u"Trentor"), - (4, u"Lea", u"Martin"), - (5, u"David", u"Lomond"), + (1, "Marc", "Richards"), + (2, "Catalina", "Smith"), + (3, "Alice", "Trentor"), + (4, "Lea", "Martin"), + (5, "David", "Lomond"), ], ) @@ -363,11 +363,11 @@ def insert_data(instance_id, database_id): table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), values=[ - (1, 1, u"Total Junk"), - (1, 2, u"Go, Go, Go"), - (2, 1, u"Green"), - (2, 2, u"Forever Hold Your Peace"), - (2, 3, u"Terrified"), + (1, 1, "Total Junk"), + (1, 2, "Go, Go, Go"), + (2, 1, "Green"), + (2, 2, "Forever Hold Your Peace"), + (2, 3, "Terrified"), ], ) @@ -423,7 +423,7 @@ def query_data(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_query_data] @@ -443,7 +443,7 @@ def read_data(instance_id, database_id): ) for row in 
results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_read_data] @@ -469,7 +469,7 @@ def read_stale_data(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_read_stale_data] @@ -495,7 +495,7 @@ def query_data_with_new_column(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_query_data_with_new_column] @@ -560,7 +560,7 @@ def query_data_with_index( ) for row in results: - print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) # [END spanner_query_data_with_index] @@ -647,7 +647,7 @@ def read_data_with_storing_index(instance_id, database_id): ) for row in results: - print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) # [END spanner_read_data_with_storing_index] @@ -789,7 +789,7 @@ def read_only_transaction(instance_id, database_id): print("Results from first read:") for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # Perform another read using the `read` method. 
Even if the data # is updated in-between the reads, the snapshot ensures that both @@ -801,7 +801,7 @@ def read_only_transaction(instance_id, database_id): print("Results from second read:") for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_read_only_transaction] @@ -844,7 +844,7 @@ def create_table_with_timestamp(instance_id, database_id): # [START spanner_insert_data_with_timestamp_column] def insert_data_with_timestamp(instance_id, database_id): - """Inserts data with a COMMIT_TIMESTAMP field into a table. """ + """Inserts data with a COMMIT_TIMESTAMP field into a table.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) @@ -870,8 +870,7 @@ def insert_data_with_timestamp(instance_id, database_id): # [START spanner_add_timestamp_column] def add_timestamp_column(instance_id, database_id): - """ Adds a new TIMESTAMP column to the Albums table in the example database. - """ + """Adds a new TIMESTAMP column to the Albums table in the example database.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) @@ -960,7 +959,7 @@ def query_data_with_timestamp(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) # [END spanner_query_data_with_timestamp_column] @@ -968,8 +967,7 @@ def query_data_with_timestamp(instance_id, database_id): # [START spanner_add_numeric_column] def add_numeric_column(instance_id, database_id): - """ Adds a new NUMERIC column to the Venues table in the example database. 
- """ + """Adds a new NUMERIC column to the Venues table in the example database.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) @@ -1026,8 +1024,7 @@ def update_data_with_numeric(instance_id, database_id): # [START spanner_add_json_column] def add_json_column(instance_id, database_id): - """ Adds a new JSON column to the Venues table in the example database. - """ + """Adds a new JSON column to the Venues table in the example database.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) @@ -1113,10 +1110,10 @@ def write_struct_data(instance_id, database_id): table="Singers", columns=("SingerId", "FirstName", "LastName"), values=[ - (6, u"Elena", u"Campbell"), - (7, u"Gabriel", u"Wright"), - (8, u"Benjamin", u"Martinez"), - (9, u"Hannah", u"Harris"), + (6, "Elena", "Campbell"), + (7, "Gabriel", "Wright"), + (8, "Benjamin", "Martinez"), + (9, "Hannah", "Harris"), ], ) @@ -1127,7 +1124,7 @@ def write_struct_data(instance_id, database_id): def query_with_struct(instance_id, database_id): - """Query a table using STRUCT parameters. """ + """Query a table using STRUCT parameters.""" # [START spanner_create_struct_with_data] record_type = param_types.Struct( [ @@ -1152,12 +1149,12 @@ def query_with_struct(instance_id, database_id): ) for row in results: - print(u"SingerId: {}".format(*row)) + print("SingerId: {}".format(*row)) # [END spanner_query_data_with_struct] def query_with_array_of_struct(instance_id, database_id): - """Query a table using an array of STRUCT parameters. 
""" + """Query a table using an array of STRUCT parameters.""" # [START spanner_create_user_defined_struct] name_type = param_types.Struct( [ @@ -1190,13 +1187,13 @@ def query_with_array_of_struct(instance_id, database_id): ) for row in results: - print(u"SingerId: {}".format(*row)) + print("SingerId: {}".format(*row)) # [END spanner_query_data_with_array_of_struct] # [START spanner_field_access_on_struct_parameters] def query_struct_field(instance_id, database_id): - """Query a table using field access on a STRUCT parameter. """ + """Query a table using field access on a STRUCT parameter.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id) @@ -1216,7 +1213,7 @@ def query_struct_field(instance_id, database_id): ) for row in results: - print(u"SingerId: {}".format(*row)) + print("SingerId: {}".format(*row)) # [END spanner_field_access_on_struct_parameters] @@ -1224,7 +1221,7 @@ def query_struct_field(instance_id, database_id): # [START spanner_field_access_on_nested_struct_parameters] def query_nested_struct_field(instance_id, database_id): - """Query a table using nested field access on a STRUCT parameter. """ + """Query a table using nested field access on a STRUCT parameter.""" spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id) @@ -1260,14 +1257,14 @@ def query_nested_struct_field(instance_id, database_id): ) for row in results: - print(u"SingerId: {} SongName: {}".format(*row)) + print("SingerId: {} SongName: {}".format(*row)) # [END spanner_field_access_on_nested_struct_parameters] def insert_data_with_dml(instance_id, database_id): - """Inserts sample data into the given database using a DML statement. 
""" + """Inserts sample data into the given database using a DML statement.""" # [START spanner_dml_standard_insert] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1290,7 +1287,7 @@ def insert_singers(transaction): # [START spanner_get_commit_stats] def log_commit_stats(instance_id, database_id): - """Inserts sample data using DML and displays the commit statistics. """ + """Inserts sample data using DML and displays the commit statistics.""" # By default, commit statistics are logged via stdout at level Info. # This sample uses a custom logger to access the commit statistics. class CommitStatsSampleLogger(logging.Logger): @@ -1325,7 +1322,7 @@ def insert_singers(transaction): def update_data_with_dml(instance_id, database_id): - """Updates sample data from the database using a DML statement. """ + """Updates sample data from the database using a DML statement.""" # [START spanner_dml_standard_update] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1348,7 +1345,7 @@ def update_albums(transaction): def delete_data_with_dml(instance_id, database_id): - """Deletes sample data from the database using a DML statement. """ + """Deletes sample data from the database using a DML statement.""" # [START spanner_dml_standard_delete] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1369,7 +1366,7 @@ def delete_singers(transaction): def update_data_with_dml_timestamp(instance_id, database_id): - """Updates data with Timestamp from the database using a DML statement. """ + """Updates data with Timestamp from the database using a DML statement.""" # [START spanner_dml_standard_update_with_timestamp] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1421,7 +1418,7 @@ def write_then_read(transaction): def update_data_with_dml_struct(instance_id, database_id): - """Updates data with a DML statement and STRUCT parameters. 
""" + """Updates data with a DML statement and STRUCT parameters.""" # [START spanner_dml_structs] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1453,7 +1450,7 @@ def write_with_struct(transaction): def insert_with_dml(instance_id, database_id): - """Inserts data with a DML statement into the database. """ + """Inserts data with a DML statement into the database.""" # [START spanner_dml_getting_started_insert] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1493,12 +1490,12 @@ def query_data_with_parameter(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row)) + print("SingerId: {}, FirstName: {}, LastName: {}".format(*row)) # [END spanner_query_with_parameter] def write_with_dml_transaction(instance_id, database_id): - """ Transfers part of a marketing budget from one album to another. """ + """Transfers part of a marketing budget from one album to another.""" # [START spanner_dml_getting_started_update] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1561,7 +1558,7 @@ def transfer_budget(transaction): def update_data_with_partitioned_dml(instance_id, database_id): - """ Update sample data with a partitioned DML statement. """ + """Update sample data with a partitioned DML statement.""" # [START spanner_dml_partitioned_update] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1579,7 +1576,7 @@ def update_data_with_partitioned_dml(instance_id, database_id): def delete_data_with_partitioned_dml(instance_id, database_id): - """ Delete sample data with a partitioned DML statement. 
""" + """Delete sample data with a partitioned DML statement.""" # [START spanner_dml_partitioned_delete] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1594,7 +1591,7 @@ def delete_data_with_partitioned_dml(instance_id, database_id): def update_with_batch_dml(instance_id, database_id): - """Updates sample data in the database using Batch DML. """ + """Updates sample data in the database using Batch DML.""" # [START spanner_dml_batch_update] from google.rpc.code_pb2 import OK @@ -1633,7 +1630,7 @@ def update_albums(transaction): def create_table_with_datatypes(instance_id, database_id): - """Creates a table with supported dataypes. """ + """Creates a table with supported dataypes.""" # [START spanner_create_table_with_datatypes] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1670,7 +1667,7 @@ def create_table_with_datatypes(instance_id, database_id): def insert_datatypes_data(instance_id, database_id): - """Inserts data with supported datatypes into a table. 
""" + """Inserts data with supported datatypes into a table.""" # [START spanner_insert_datatypes_data] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1678,9 +1675,9 @@ def insert_datatypes_data(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleBytes1 = base64.b64encode(u"Hello World 1".encode()) - exampleBytes2 = base64.b64encode(u"Hello World 2".encode()) - exampleBytes3 = base64.b64encode(u"Hello World 3".encode()) + exampleBytes1 = base64.b64encode("Hello World 1".encode()) + exampleBytes2 = base64.b64encode("Hello World 2".encode()) + exampleBytes3 = base64.b64encode("Hello World 3".encode()) available_dates1 = ["2020-12-01", "2020-12-02", "2020-12-03"] available_dates2 = ["2020-11-01", "2020-11-05", "2020-11-15"] available_dates3 = ["2020-10-01", "2020-10-07"] @@ -1701,7 +1698,7 @@ def insert_datatypes_data(instance_id, database_id): values=[ ( 4, - u"Venue 4", + "Venue 4", exampleBytes1, 1800, available_dates1, @@ -1712,7 +1709,7 @@ def insert_datatypes_data(instance_id, database_id): ), ( 19, - u"Venue 19", + "Venue 19", exampleBytes2, 6300, available_dates2, @@ -1723,7 +1720,7 @@ def insert_datatypes_data(instance_id, database_id): ), ( 42, - u"Venue 42", + "Venue 42", exampleBytes3, 3000, available_dates3, @@ -1740,7 +1737,7 @@ def insert_datatypes_data(instance_id, database_id): def query_data_with_array(instance_id, database_id): - """Queries sample data using SQL with an ARRAY parameter. 
""" + """Queries sample data using SQL with an ARRAY parameter.""" # [START spanner_query_with_array_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1762,12 +1759,12 @@ def query_data_with_array(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, AvailableDate: {}".format(*row)) + print("VenueId: {}, VenueName: {}, AvailableDate: {}".format(*row)) # [END spanner_query_with_array_parameter] def query_data_with_bool(instance_id, database_id): - """Queries sample data using SQL with a BOOL parameter. """ + """Queries sample data using SQL with a BOOL parameter.""" # [START spanner_query_with_bool_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1788,12 +1785,12 @@ def query_data_with_bool(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) + print("VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) # [END spanner_query_with_bool_parameter] def query_data_with_bytes(instance_id, database_id): - """Queries sample data using SQL with a BYTES parameter. 
""" + """Queries sample data using SQL with a BYTES parameter.""" # [START spanner_query_with_bytes_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1801,7 +1798,7 @@ def query_data_with_bytes(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleBytes = base64.b64encode(u"Hello World 1".encode()) + exampleBytes = base64.b64encode("Hello World 1".encode()) param = {"venue_info": exampleBytes} param_type = {"venue_info": param_types.BYTES} @@ -1813,12 +1810,12 @@ def query_data_with_bytes(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}".format(*row)) + print("VenueId: {}, VenueName: {}".format(*row)) # [END spanner_query_with_bytes_parameter] def query_data_with_date(instance_id, database_id): - """Queries sample data using SQL with a DATE parameter. """ + """Queries sample data using SQL with a DATE parameter.""" # [START spanner_query_with_date_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1839,12 +1836,12 @@ def query_data_with_date(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastContactDate: {}".format(*row)) + print("VenueId: {}, VenueName: {}, LastContactDate: {}".format(*row)) # [END spanner_query_with_date_parameter] def query_data_with_float(instance_id, database_id): - """Queries sample data using SQL with a FLOAT64 parameter. 
""" + """Queries sample data using SQL with a FLOAT64 parameter.""" # [START spanner_query_with_float_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1865,12 +1862,12 @@ def query_data_with_float(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + print("VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_query_with_float_parameter] def query_data_with_int(instance_id, database_id): - """Queries sample data using SQL with a INT64 parameter. """ + """Queries sample data using SQL with a INT64 parameter.""" # [START spanner_query_with_int_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1891,12 +1888,12 @@ def query_data_with_int(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) + print("VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) # [END spanner_query_with_int_parameter] def query_data_with_string(instance_id, database_id): - """Queries sample data using SQL with a STRING parameter. """ + """Queries sample data using SQL with a STRING parameter.""" # [START spanner_query_with_string_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1916,12 +1913,12 @@ def query_data_with_string(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}".format(*row)) + print("VenueId: {}, VenueName: {}".format(*row)) # [END spanner_query_with_string_parameter] def query_data_with_numeric_parameter(instance_id, database_id): - """Queries sample data using SQL with a NUMERIC parameter. 
""" + """Queries sample data using SQL with a NUMERIC parameter.""" # [START spanner_query_with_numeric_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1941,12 +1938,12 @@ def query_data_with_numeric_parameter(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, Revenue: {}".format(*row)) + print("VenueId: {}, Revenue: {}".format(*row)) # [END spanner_query_with_numeric_parameter] def query_data_with_json_parameter(instance_id, database_id): - """Queries sample data using SQL with a JSON parameter. """ + """Queries sample data using SQL with a JSON parameter.""" # [START spanner_query_with_json_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1969,12 +1966,12 @@ def query_data_with_json_parameter(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueDetails: {}".format(*row)) + print("VenueId: {}, VenueDetails: {}".format(*row)) # [END spanner_query_with_json_parameter] def query_data_with_timestamp_parameter(instance_id, database_id): - """Queries sample data using SQL with a TIMESTAMP parameter. 
""" + """Queries sample data using SQL with a TIMESTAMP parameter.""" # [START spanner_query_with_timestamp_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -2002,7 +1999,7 @@ def query_data_with_timestamp_parameter(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_query_with_timestamp_parameter] @@ -2025,7 +2022,7 @@ def query_data_with_query_options(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_query_with_query_options] @@ -2049,7 +2046,7 @@ def create_client_with_query_options(instance_id, database_id): ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) + print("VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_create_client_with_query_options] @@ -2113,7 +2110,7 @@ def set_request_tag(instance_id, database_id): ) for row in results: - print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + print("SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) # [END spanner_set_request_tag] @@ -2126,7 +2123,8 @@ def create_instance_config(user_config_name, base_config_id): # base_config_id = `projects//instanceConfigs/nam11` spanner_client = spanner.Client() base_config = spanner_client.instance_admin_api.get_instance_config( - name=base_config_id) + name=base_config_id + ) # The replicas for the custom instance configuration must include all the replicas of the base # configuration, in addition to at least one from the list of optional replicas of the base @@ -2139,15 +2137,16 @@ def create_instance_config(user_config_name, base_config_id): parent=spanner_client.project_name, instance_config_id=user_config_name, 
instance_config=spanner_instance_admin.InstanceConfig( - name="{}/instanceConfigs/{}".format(spanner_client.project_name, user_config_name), + name="{}/instanceConfigs/{}".format( + spanner_client.project_name, user_config_name + ), display_name="custom-python-samples", config_type=spanner_instance_admin.InstanceConfig.Type.USER_MANAGED, replicas=replicas, base_config=base_config.name, - labels={ - "python_cloud_spanner_samples": "true" - } - )) + labels={"python_cloud_spanner_samples": "true"}, + ), + ) print("Waiting for operation to complete...") operation.result(OPERATION_TIMEOUT_SECONDS) @@ -2163,12 +2162,16 @@ def update_instance_config(user_config_name): # user_config_name = `custom-nam11` spanner_client = spanner.Client() config = spanner_client.instance_admin_api.get_instance_config( - name="{}/instanceConfigs/{}".format(spanner_client.project_name, user_config_name)) + name="{}/instanceConfigs/{}".format( + spanner_client.project_name, user_config_name + ) + ) config.display_name = "updated custom instance config" config.labels["updated"] = "true" - operation = spanner_client.instance_admin_api.update_instance_config(instance_config=config, - update_mask=field_mask_pb2.FieldMask( - paths=["display_name", "labels"])) + operation = spanner_client.instance_admin_api.update_instance_config( + instance_config=config, + update_mask=field_mask_pb2.FieldMask(paths=["display_name", "labels"]), + ) print("Waiting for operation to complete...") operation.result(OPERATION_TIMEOUT_SECONDS) print("Updated instance configuration {}".format(user_config_name)) @@ -2180,8 +2183,7 @@ def update_instance_config(user_config_name): def delete_instance_config(user_config_id): """Deleted the user-managed instance configuration.""" spanner_client = spanner.Client() - spanner_client.instance_admin_api.delete_instance_config( - name=user_config_id) + spanner_client.instance_admin_api.delete_instance_config(name=user_config_id) print("Instance config {} successfully 
deleted".format(user_config_id)) @@ -2193,10 +2195,15 @@ def list_instance_config_operations(): """List the user-managed instance configuration operations.""" spanner_client = spanner.Client() operations = spanner_client.instance_admin_api.list_instance_config_operations( - request=spanner_instance_admin.ListInstanceConfigOperationsRequest(parent=spanner_client.project_name, - filter="(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)")) + request=spanner_instance_admin.ListInstanceConfigOperationsRequest( + parent=spanner_client.project_name, + filter="(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)", + ) + ) for op in operations: - metadata = spanner_instance_admin.CreateInstanceConfigMetadata.pb(spanner_instance_admin.CreateInstanceConfigMetadata()) + metadata = spanner_instance_admin.CreateInstanceConfigMetadata.pb( + spanner_instance_admin.CreateInstanceConfigMetadata() + ) op.metadata.Unpack(metadata) print( "List instance config operations {} is {}% completed.".format( diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index f085a0e71c..4fcd06b897 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -99,8 +99,9 @@ def default_leader(): def user_managed_instance_config_name(spanner_client): name = f"custom-python-samples-config-{uuid.uuid4().hex[:10]}" yield name - snippets.delete_instance_config("{}/instanceConfigs/{}".format( - spanner_client.project_name, name)) + snippets.delete_instance_config( + "{}/instanceConfigs/{}".format(spanner_client.project_name, name) + ) return @@ -128,7 +129,8 @@ def test_create_database_explicit(sample_instance, create_database_id): def test_create_instance_with_processing_units(capsys, lci_instance_id): processing_units = 500 retry_429(snippets.create_instance_with_processing_units)( - lci_instance_id, processing_units, + lci_instance_id, + processing_units, ) 
out, _ = capsys.readouterr() assert lci_instance_id in out @@ -163,8 +165,12 @@ def test_list_instance_config(capsys): @pytest.mark.dependency(name="create_instance_config") -def test_create_instance_config(capsys, user_managed_instance_config_name, base_instance_config_id): - snippets.create_instance_config(user_managed_instance_config_name, base_instance_config_id) +def test_create_instance_config( + capsys, user_managed_instance_config_name, base_instance_config_id +): + snippets.create_instance_config( + user_managed_instance_config_name, base_instance_config_id + ) out, _ = capsys.readouterr() assert "Created instance configuration" in out @@ -179,8 +185,11 @@ def test_update_instance_config(capsys, user_managed_instance_config_name): @pytest.mark.dependency(depends=["create_instance_config"]) def test_delete_instance_config(capsys, user_managed_instance_config_name): spanner_client = spanner.Client() - snippets.delete_instance_config("{}/instanceConfigs/{}".format( - spanner_client.project_name, user_managed_instance_config_name)) + snippets.delete_instance_config( + "{}/instanceConfigs/{}".format( + spanner_client.project_name, user_managed_instance_config_name + ) + ) out, _ = capsys.readouterr() assert "successfully deleted" in out @@ -543,7 +552,8 @@ def test_create_table_with_datatypes(capsys, instance_id, sample_database): @pytest.mark.dependency( - name="insert_datatypes_data", depends=["create_table_with_datatypes"], + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], ) def test_insert_datatypes_data(capsys, instance_id, sample_database): snippets.insert_datatypes_data(instance_id, sample_database.database_id) @@ -605,7 +615,8 @@ def test_query_data_with_string(capsys, instance_id, sample_database): @pytest.mark.dependency( - name="add_numeric_column", depends=["create_table_with_datatypes"], + name="add_numeric_column", + depends=["create_table_with_datatypes"], ) def test_add_numeric_column(capsys, instance_id, 
sample_database): snippets.add_numeric_column(instance_id, sample_database.database_id) @@ -628,7 +639,8 @@ def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database) @pytest.mark.dependency( - name="add_json_column", depends=["create_table_with_datatypes"], + name="add_json_column", + depends=["create_table_with_datatypes"], ) def test_add_json_column(capsys, instance_id, sample_database): snippets.add_json_column(instance_id, sample_database.database_id) From fc4a87f3008f37ba178036fa75feae3134cb2948 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 4 Oct 2022 07:44:35 +0000 Subject: [PATCH 03/25] fix: remove unnecessary imports --- samples/samples/pg_snippets.py | 3 --- samples/samples/snippets.py | 10 +++++----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 43b19b022f..1f991cd4f6 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -23,13 +23,10 @@ import base64 import datetime import decimal -import json -import logging import time from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.cloud.spanner_v1 import param_types from google.protobuf import field_mask_pb2 # type: ignore diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 39be5bb013..9317d7de13 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -626,7 +626,7 @@ def read_data_with_storing_index(instance_id, database_id): clause. The index must exist before running this sample. 
You can add the index - by running the `add_soring_index` sample + by running the `add_storing_index` sample or by running this DDL statement against your database: CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) @@ -1275,7 +1275,7 @@ def insert_data_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT Singers (SingerId, FirstName, LastName) " + "INSERT INTO Singers (SingerId, FirstName, LastName) " " VALUES (10, 'Virginia', 'Watson')" ) @@ -1401,7 +1401,7 @@ def dml_write_read_transaction(instance_id, database_id): def write_then_read(transaction): # Insert record. row_ct = transaction.execute_update( - "INSERT Singers (SingerId, FirstName, LastName) " + "INSERT INTO Singers (SingerId, FirstName, LastName) " " VALUES (11, 'Timothy', 'Campbell')" ) print("{} record(s) inserted.".format(row_ct)) @@ -1460,7 +1460,7 @@ def insert_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT Singers (SingerId, FirstName, LastName) VALUES " + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " "(12, 'Melissa', 'Garcia'), " "(13, 'Russell', 'Morales'), " "(14, 'Jacqueline', 'Long'), " @@ -1630,7 +1630,7 @@ def update_albums(transaction): def create_table_with_datatypes(instance_id, database_id): - """Creates a table with supported dataypes.""" + """Creates a table with supported datatypes. 
""" # [START spanner_create_table_with_datatypes] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" From 138ac422361d7965665f16fc2dedb5982121dc8c Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Thu, 6 Oct 2022 03:57:39 +0000 Subject: [PATCH 04/25] remove unused import --- samples/samples/pg_snippets.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 1f991cd4f6..01bea16e3d 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -28,7 +28,6 @@ from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import param_types -from google.protobuf import field_mask_pb2 # type: ignore OPERATION_TIMEOUT_SECONDS = 240 @@ -1459,8 +1458,6 @@ def query_data_with_query_options(instance_id, database_id): query_data_with_bool(args.instance_id, args.database_id) elif args.command == "query_data_with_bytes": query_data_with_bytes(args.instance_id, args.database_id) - elif args.command == "query_data_with_date": - query_data_with_date(args.instance_id, args.database_id) elif args.command == "query_data_with_float": query_data_with_float(args.instance_id, args.database_id) elif args.command == "query_data_with_int": From 5a8a540ae4eb23c1ec73aac5390aeaff92037024 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Thu, 6 Oct 2022 05:59:58 +0000 Subject: [PATCH 05/25] fix: change method doc references in parser --- samples/samples/pg_snippets.py | 4 ++-- samples/samples/snippets.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 01bea16e3d..25bc93b422 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1323,9 +1323,9 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("read_write_transaction", 
help=read_write_transaction.__doc__) subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) subparsers.add_parser("add_index", help=add_index.__doc__) - subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) + subparsers.add_parser("read_data_with_index", help=read_data_with_index.__doc__) subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) - subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) + subparsers.add_parser("read_data_with_storing_index", help=read_data_with_storing_index.__doc__) subparsers.add_parser( "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 00cb999fad..7a64c2c818 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -2245,9 +2245,9 @@ def list_instance_config_operations(): ) query_data_with_index_parser.add_argument("--start_title", default="Aardvark") query_data_with_index_parser.add_argument("--end_title", default="Goo") - subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) + subparsers.add_parser("read_data_with_index", help=read_data_with_index.__doc__) subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) - subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) + subparsers.add_parser("read_data_with_storing_index", help=read_data_with_storing_index.__doc__) subparsers.add_parser( "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) From e30729b86586c7a557136c24f67cdcfcfffd2b66 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Thu, 6 Oct 2022 06:05:04 +0000 Subject: [PATCH 06/25] add another command --- samples/samples/pg_snippets.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 25bc93b422..518f206f7b 100644 --- a/samples/samples/pg_snippets.py +++ 
b/samples/samples/pg_snippets.py @@ -1466,6 +1466,8 @@ def query_data_with_query_options(instance_id, database_id): query_data_with_string(args.instance_id, args.database_id) elif args.command == "query_data_with_timestamp_parameter": query_data_with_timestamp_parameter(args.instance_id, args.database_id) + elif args.command == "query_data_with_numeric_parameter": + query_data_with_numeric_parameter(args.instance_id, args.database_id) elif args.command == "query_data_with_query_options": query_data_with_query_options(args.instance_id, args.database_id) elif args.command == "create_client_with_query_options": From 9d08391c4420bf024a097b8024b194838f2a2f8e Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Thu, 6 Oct 2022 14:14:38 +0000 Subject: [PATCH 07/25] test: add samples tests for PG --- samples/samples/pg_snippets_test.py | 445 ++++++++++++++++++++++++++++ 1 file changed, 445 insertions(+) create mode 100644 samples/samples/pg_snippets_test.py diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py new file mode 100644 index 0000000000..e662165f13 --- /dev/null +++ b/samples/samples/pg_snippets_test.py @@ -0,0 +1,445 @@ +# Copyright 2022 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time +import uuid + +from google.api_core import exceptions +from google.cloud import spanner +import pytest +from test_utils.retry import RetryErrors + +import pg_snippets as snippets + +CREATE_TABLE_SINGERS = """\ +CREATE TABLE Singers ( + SingerId BIGINT NOT NULL, + FirstName CHARACTER VARYING(1024), + LastName CHARACTER VARYING(1024), + SingerInfo BYTEA, + PRIMARY KEY (SingerId) +) +""" + +CREATE_TABLE_ALBUMS = """\ +CREATE TABLE Albums ( + SingerId BIGINT NOT NULL, + AlbumId BIGINT NOT NULL, + AlbumTitle CHARACTER VARYING(MAX), + PRIMARY KEY (SingerId, AlbumId) + ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE +""" + +retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15) + + +@pytest.fixture(scope="module") +def sample_name(): + return "pg_snippets" + + +@pytest.fixture(scope="module") +def create_instance_id(): + """Id for the low-cost instance.""" + return f"create-instance-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def lci_instance_id(): + """Id for the low-cost instance.""" + return f"lci-instance-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def database_id(): + return f"test-db-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def create_database_id(): + return f"create-db-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def cmek_database_id(): + return f"cmek-db-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def default_leader_database_id(): + return f"leader_db_{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def database_ddl(): + """Sequence of DDL statements used to set up the database. + + Sample testcase modules can override as needed. 
+ """ + return [CREATE_TABLE_SINGERS, CREATE_TABLE_ALBUMS] + + +@pytest.fixture(scope="module") +def default_leader(): + """Default leader for multi-region instances.""" + return "us-east4" + + +@pytest.fixture(scope="module") +def user_managed_instance_config_name(spanner_client): + name = f"custom-python-samples-config-{uuid.uuid4().hex[:10]}" + yield name + snippets.delete_instance_config( + "{}/instanceConfigs/{}".format(spanner_client.project_name, name) + ) + return + + +@pytest.fixture(scope="module") +def base_instance_config_id(spanner_client): + return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam7") + + +def test_create_instance_explicit(spanner_client, create_instance_id): + # Rather than re-use 'sample_instance', we create a new instance, to + # ensure that the 'create_instance' snippet is tested. + retry_429(snippets.create_instance)(create_instance_id) + instance = spanner_client.instance(create_instance_id) + retry_429(instance.delete)() + + +def test_create_database_explicit(sample_instance, create_database_id): + # Rather than re-use 'sample_database', we create a new database, to + # ensure that the 'create_database' snippet is tested. 
+ snippets.create_database(sample_instance.instance_id, create_database_id) + database = sample_instance.database(create_database_id) + database.drop() + + +@pytest.mark.dependency(name="insert_data") +def test_insert_data(capsys, instance_id, sample_database): + snippets.insert_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Inserted data" in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_delete_data(capsys, instance_id, sample_database): + snippets.delete_data(instance_id, sample_database.database_id) + # put it back for other tests + snippets.insert_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Deleted data" in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_query_data(capsys, instance_id, sample_database): + snippets.query_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out + + +@pytest.mark.dependency(name="add_column", depends=["insert_data"]) +def test_add_column(capsys, instance_id, sample_database): + snippets.add_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Added the MarketingBudget column." in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_read_data(capsys, instance_id, sample_database): + snippets.read_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out + + +@pytest.mark.dependency(name="update_data", depends=["add_column"]) +def test_update_data(capsys, instance_id, sample_database): + # Sleep for 15 seconds to ensure previous inserts will be + # 'stale' by the time test_read_stale_data is run. + time.sleep(15) + + snippets.update_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Updated data." 
in out + + +@pytest.mark.dependency(depends=["update_data"]) +def test_read_stale_data(capsys, instance_id, sample_database): + # This snippet relies on test_update_data inserting data + # at least 15 seconds after the previous insert + snippets.read_stale_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 1, AlbumId: 1, MarketingBudget: None" in out + + +@pytest.mark.dependency(depends=["add_column"]) +def test_read_write_transaction(capsys, instance_id, sample_database): + snippets.read_write_transaction(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Transaction complete" in out + + +@pytest.mark.dependency(depends=["add_column"]) +def test_query_data_with_new_column(capsys, instance_id, sample_database): + snippets.query_data_with_new_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out + assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out + + +@pytest.mark.dependency(name="add_index", depends=["insert_data"]) +def test_add_index(capsys, instance_id, sample_database): + snippets.add_index(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Added the AlbumsByAlbumTitle index" in out + +@pytest.mark.dependency(depends=["add_index"]) +def test_read_data_with_index(capsys, instance_id, sample_database): + snippets.read_data_with_index(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Go, Go, Go" in out + assert "Forever Hold Your Peace" in out + assert "Green" in out + + +@pytest.mark.dependency(name="add_storing_index", depends=["insert_data"]) +def test_add_storing_index(capsys, instance_id, sample_database): + snippets.add_storing_index(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Added the AlbumsByAlbumTitle2 index." 
in out + + +@pytest.mark.dependency(depends=["add_storing_index"]) +def test_read_data_with_storing_index(capsys, instance_id, sample_database): + snippets.read_data_with_storing_index(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "300000" in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_read_only_transaction(capsys, instance_id, sample_database): + snippets.read_only_transaction(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + # Snippet does two reads, so entry should be listed twice + assert out.count("SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk") == 2 + + +@pytest.mark.dependency(name="add_timestamp_column", depends=["insert_data"]) +def test_add_timestamp_column(capsys, instance_id, sample_database): + snippets.add_timestamp_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert 'Altered table "Albums" on database ' in out + + +@pytest.mark.dependency(depends=["add_timestamp_column"]) +def test_update_data_with_timestamp(capsys, instance_id, sample_database): + snippets.update_data_with_timestamp(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Updated data" in out + + +@pytest.mark.dependency(depends=["add_timestamp_column"]) +def test_query_data_with_timestamp(capsys, instance_id, sample_database): + snippets.query_data_with_timestamp(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 1, AlbumId: 1, MarketingBudget: 1000000" in out + assert "SingerId: 2, AlbumId: 2, MarketingBudget: 750000" in out + + +@pytest.mark.dependency(name="create_table_with_timestamp") +def test_create_table_with_timestamp(capsys, instance_id, sample_database): + snippets.create_table_with_timestamp(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Created Performances table on database" in out + + 
+@pytest.mark.dependency(depends=["create_table_with_datatypes"]) +def test_insert_data_with_timestamp(capsys, instance_id, sample_database): + snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Inserted data." in out + + +@pytest.mark.dependency(name="insert_data_with_dml") +def test_insert_data_with_dml(capsys, instance_id, sample_database): + snippets.insert_data_with_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) inserted." in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_update_data_with_dml(capsys, instance_id, sample_database): + snippets.update_data_with_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) updated." in out + + +@pytest.mark.dependency(depends=["insert_data"]) +def test_delete_data_with_dml(capsys, instance_id, sample_database): + snippets.delete_data_with_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) deleted." in out + + +@pytest.mark.dependency(name="dml_write_read_transaction") +def test_dml_write_read_transaction(capsys, instance_id, sample_database): + snippets.dml_write_read_transaction(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) inserted." 
in out + assert "FirstName: Timothy, LastName: Campbell" in out + + +@pytest.mark.dependency(name="insert_with_dml") +def test_insert_with_dml(capsys, instance_id, sample_database): + snippets.insert_with_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "4 record(s) inserted" in out + + +@pytest.mark.dependency(depends=["insert_with_dml"]) +def test_query_data_with_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_parameter(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "SingerId: 12, FirstName: Melissa, LastName: Garcia" in out + + +@pytest.mark.dependency(depends=["add_column"]) +def test_write_with_dml_transaction(capsys, instance_id, sample_database): + snippets.write_with_dml_transaction(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Transferred 200000 from Album2's budget to Album1's" in out + + +@pytest.mark.dependency(depends=["add_column"]) +def update_data_with_partitioned_dml(capsys, instance_id, sample_database): + snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "3 record(s) updated" in out + + +@pytest.mark.dependency(depends=["insert_with_dml"]) +def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): + snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "6 record(s) deleted" in out + + +@pytest.mark.dependency(depends=["add_column"]) +def test_update_with_batch_dml(capsys, instance_id, sample_database): + snippets.update_with_batch_dml(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Executed 2 SQL statements using Batch DML" in out + + +@pytest.mark.dependency(name="create_table_with_datatypes") +def test_create_table_with_datatypes(capsys, instance_id, sample_database): + 
snippets.create_table_with_datatypes(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Created Venues table on database" in out + + +@pytest.mark.dependency( + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], +) +def test_insert_datatypes_data(capsys, instance_id, sample_database): + snippets.insert_datatypes_data(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Inserted data." in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_bool(capsys, instance_id, sample_database): + snippets.query_data_with_bool(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 19, VenueName: Venue 19, OutdoorVenue: True" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_bytes(capsys, instance_id, sample_database): + snippets.query_data_with_bytes(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 4, VenueName: Venue 4" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_float(capsys, instance_id, sample_database): + snippets.query_data_with_float(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8" in out + assert "VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_int(capsys, instance_id, sample_database): + snippets.query_data_with_int(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 19, VenueName: Venue 19, Capacity: 6300" in out + assert "VenueId: 42, VenueName: Venue 42, Capacity: 3000" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_string(capsys, instance_id, sample_database): + 
snippets.query_data_with_string(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 42, VenueName: Venue 42" in out + + +@pytest.mark.dependency( + name="add_numeric_column", + depends=["create_table_with_datatypes"], +) + + +@pytest.mark.dependency(depends=["add_numeric_column"]) +def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 4, Revenue: 35000" in out + + +@pytest.mark.dependency( + name="add_json_column", + depends=["create_table_with_datatypes"], +) + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_timestamp_parameter( + instance_id, sample_database.database_id + ) + out, _ = capsys.readouterr() + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_query_data_with_query_options(capsys, instance_id, sample_database): + snippets.query_data_with_query_options(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out + + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_create_client_with_query_options(capsys, instance_id, sample_database): + snippets.create_client_with_query_options(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in 
out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out From 43f3918406e8e2941fb079c29df957f23c9440b1 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Thu, 6 Oct 2022 14:37:26 +0000 Subject: [PATCH 08/25] fix: linting --- samples/samples/pg_snippets_test.py | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index e662165f13..aab3d4dce2 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -16,7 +16,6 @@ import uuid from google.api_core import exceptions -from google.cloud import spanner import pytest from test_utils.retry import RetryErrors @@ -205,6 +204,7 @@ def test_add_index(capsys, instance_id, sample_database): out, _ = capsys.readouterr() assert "Added the AlbumsByAlbumTitle index" in out + @pytest.mark.dependency(depends=["add_index"]) def test_read_data_with_index(capsys, instance_id, sample_database): snippets.read_data_with_index(instance_id, sample_database.database_id) @@ -397,12 +397,6 @@ def test_query_data_with_string(capsys, instance_id, sample_database): assert "VenueId: 42, VenueName: Venue 42" in out -@pytest.mark.dependency( - name="add_numeric_column", - depends=["create_table_with_datatypes"], -) - - @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) @@ -410,12 +404,6 @@ def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database) assert "VenueId: 4, Revenue: 35000" in out -@pytest.mark.dependency( - name="add_json_column", - depends=["create_table_with_datatypes"], -) - - @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): snippets.query_data_with_timestamp_parameter( From 
078d8c898c245edc88a41baeee083b67cdbbb715 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Sun, 9 Oct 2022 22:39:13 +0000 Subject: [PATCH 09/25] feat: sample tests config changes --- samples/samples/conftest.py | 217 ++++++++++++++++------------ samples/samples/pg_snippets_test.py | 27 ++-- samples/samples/snippets_test.py | 11 ++ 3 files changed, 148 insertions(+), 107 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index c745afa151..f11b3f0bf6 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -17,6 +17,7 @@ import uuid from google.api_core import exceptions +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import backup from google.cloud.spanner_v1 import client from google.cloud.spanner_v1 import database @@ -31,180 +32,214 @@ @pytest.fixture(scope="module") def sample_name(): - """Sample testcase modules must define this fixture. + """Sample testcase modules must define this fixture. The name is used to label the instance created by the sample, to aid in debugging leaked instances. """ - raise NotImplementedError("Define 'sample_name' fixture in sample test driver") + raise NotImplementedError( + "Define 'sample_name' fixture in sample test driver") + + +@pytest.fixture(scope="module") +def spanner_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. 
+ """ + raise NotImplementedError( + "Define 'spanner_dialect' fixture in sample test driver") @pytest.fixture(scope="session") def spanner_client(): - """Shared client used across all samples in a session.""" - return client.Client() + """Shared client used across all samples in a session.""" + return client.Client() def scrub_instance_ignore_not_found(to_scrub): - """Helper for func:`cleanup_old_instances`""" - try: - for backup_pb in to_scrub.list_backups(): - backup.Backup.from_pb(backup_pb, to_scrub).delete() + """Helper for func:`cleanup_old_instances`""" + try: + for backup_pb in to_scrub.list_backups(): + backup.Backup.from_pb(backup_pb, to_scrub).delete() - retry_429(to_scrub.delete)() - except exceptions.NotFound: - pass + retry_429(to_scrub.delete)() + except exceptions.NotFound: + pass @pytest.fixture(scope="session") def cleanup_old_instances(spanner_client): - """Delete instances, created by samples, that are older than an hour.""" - cutoff = int(time.time()) - 1 * 60 * 60 - instance_filter = "labels.cloud_spanner_samples:true" + """Delete instances, created by samples, that are older than an hour.""" + cutoff = int(time.time()) - 1 * 60 * 60 + instance_filter = "labels.cloud_spanner_samples:true" - for instance_pb in spanner_client.list_instances(filter_=instance_filter): - inst = instance.Instance.from_pb(instance_pb, spanner_client) + for instance_pb in spanner_client.list_instances(filter_=instance_filter): + inst = instance.Instance.from_pb(instance_pb, spanner_client) - if "created" in inst.labels: - create_time = int(inst.labels["created"]) + if "created" in inst.labels: + create_time = int(inst.labels["created"]) - if create_time <= cutoff: - scrub_instance_ignore_not_found(inst) + if create_time <= cutoff: + scrub_instance_ignore_not_found(inst) @pytest.fixture(scope="module") def instance_id(): - """Unique id for the instance used in samples.""" - return f"test-instance-{uuid.uuid4().hex[:10]}" + """Unique id for the instance used in 
samples.""" + return f"test-instance-{uuid.uuid4().hex[:10]}" @pytest.fixture(scope="module") def multi_region_instance_id(): - """Unique id for the multi-region instance used in samples.""" - return f"multi-instance-{uuid.uuid4().hex[:10]}" + """Unique id for the multi-region instance used in samples.""" + return f"multi-instance-{uuid.uuid4().hex[:10]}" @pytest.fixture(scope="module") def instance_config(spanner_client): - return "{}/instanceConfigs/{}".format( - spanner_client.project_name, "regional-us-central1" - ) + return "{}/instanceConfigs/{}".format( + spanner_client.project_name, "regional-us-central1" + ) @pytest.fixture(scope="module") def multi_region_instance_config(spanner_client): - return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam3") + return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam3") @pytest.fixture(scope="module") def sample_instance( - spanner_client, - cleanup_old_instances, + spanner_client, + cleanup_old_instances, + instance_id, + instance_config, + sample_name, +): + sample_instance = spanner_client.instance( instance_id, instance_config, - sample_name, -): - sample_instance = spanner_client.instance( - instance_id, - instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": sample_name, - "created": str(int(time.time())), - }, - ) - op = retry_429(sample_instance.create)() - op.result(INSTANCE_CREATION_TIMEOUT) # block until completion + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, + ) + op = retry_429(sample_instance.create)() + op.result(INSTANCE_CREATION_TIMEOUT) # block until completion - # Eventual consistency check - retry_found = retry.RetryResult(bool) - retry_found(sample_instance.exists)() + # Eventual consistency check + retry_found = retry.RetryResult(bool) + retry_found(sample_instance.exists)() - yield sample_instance + yield sample_instance - for database_pb in 
sample_instance.list_databases(): - database.Database.from_pb(database_pb, sample_instance).drop() + for database_pb in sample_instance.list_databases(): + database.Database.from_pb(database_pb, sample_instance).drop() - for backup_pb in sample_instance.list_backups(): - backup.Backup.from_pb(backup_pb, sample_instance).delete() + for backup_pb in sample_instance.list_backups(): + backup.Backup.from_pb(backup_pb, sample_instance).delete() - sample_instance.delete() + sample_instance.delete() @pytest.fixture(scope="module") def multi_region_instance( - spanner_client, - cleanup_old_instances, + spanner_client, + cleanup_old_instances, + multi_region_instance_id, + multi_region_instance_config, + sample_name, +): + multi_region_instance = spanner_client.instance( multi_region_instance_id, multi_region_instance_config, - sample_name, -): - multi_region_instance = spanner_client.instance( - multi_region_instance_id, - multi_region_instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": sample_name, - "created": str(int(time.time())), - }, - ) - op = retry_429(multi_region_instance.create)() - op.result(INSTANCE_CREATION_TIMEOUT) # block until completion + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, + ) + op = retry_429(multi_region_instance.create)() + op.result(INSTANCE_CREATION_TIMEOUT) # block until completion - # Eventual consistency check - retry_found = retry.RetryResult(bool) - retry_found(multi_region_instance.exists)() + # Eventual consistency check + retry_found = retry.RetryResult(bool) + retry_found(multi_region_instance.exists)() - yield multi_region_instance + yield multi_region_instance - for database_pb in multi_region_instance.list_databases(): - database.Database.from_pb(database_pb, multi_region_instance).drop() + for database_pb in multi_region_instance.list_databases(): + database.Database.from_pb(database_pb, multi_region_instance).drop() - for backup_pb 
in multi_region_instance.list_backups(): - backup.Backup.from_pb(backup_pb, multi_region_instance).delete() + for backup_pb in multi_region_instance.list_backups(): + backup.Backup.from_pb(backup_pb, multi_region_instance).delete() - multi_region_instance.delete() + multi_region_instance.delete() @pytest.fixture(scope="module") def database_id(): - """Id for the database used in samples. + """Id for the database used in samples. Sample testcase modules can override as needed. """ - return "my-database-id" + return "my-database-id" @pytest.fixture(scope="module") def database_ddl(): - """Sequence of DDL statements used to set up the database. + """Sequence of DDL statements used to set up the database. Sample testcase modules can override as needed. """ - return [] + return [] @pytest.fixture(scope="module") -def sample_database(sample_instance, database_id, database_ddl): - +def sample_database( + sample_instance, + database_id, + database_ddl, + database_dialect): + if database_dialect == DatabaseDialect.POSTGRESQL: sample_database = sample_instance.database( - database_id, - ddl_statements=database_ddl, + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, ) if not sample_database.exists(): - sample_database.create() + sample_database.create() + + request = sample_instance.spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=sample_database.name, + statements=database_ddl, + ) + sample_instance.database_admin_api.update_database_ddl(request) yield sample_database sample_database.drop() + sample_database = sample_instance.database( + database_id, + ddl_statements=database_ddl, + ) + + if not sample_database.exists(): + sample_database.create() + + yield sample_database + + sample_database.drop() + @pytest.fixture(scope="module") def kms_key_name(spanner_client): - return "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - spanner_client.project, - "us-central1", - "spanner-test-keyring", - "spanner-test-cmek", - ) + return 
"projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + spanner_client.project, + "us-central1", + "spanner-test-keyring", + "spanner-test-cmek", + ) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index aab3d4dce2..0ccad622f8 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -16,6 +16,7 @@ import uuid from google.api_core import exceptions +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect import pytest from test_utils.retry import RetryErrors @@ -48,6 +49,16 @@ def sample_name(): return "pg_snippets" +@pytest.fixture(scope="module") +def spanner_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. + """ + return DatabaseDialect.POSTGRESQL + + @pytest.fixture(scope="module") def create_instance_id(): """Id for the low-cost instance.""" @@ -79,7 +90,6 @@ def cmek_database_id(): def default_leader_database_id(): return f"leader_db_{uuid.uuid4().hex[:10]}" - @pytest.fixture(scope="module") def database_ddl(): """Sequence of DDL statements used to set up the database. @@ -95,21 +105,6 @@ def default_leader(): return "us-east4" -@pytest.fixture(scope="module") -def user_managed_instance_config_name(spanner_client): - name = f"custom-python-samples-config-{uuid.uuid4().hex[:10]}" - yield name - snippets.delete_instance_config( - "{}/instanceConfigs/{}".format(spanner_client.project_name, name) - ) - return - - -@pytest.fixture(scope="module") -def base_instance_config_id(spanner_client): - return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam7") - - def test_create_instance_explicit(spanner_client, create_instance_id): # Rather than re-use 'sample_isntance', we create a new instance, to # ensure that the 'create_instance' snippet is tested. 
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 4fcd06b897..376ef50f87 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -17,6 +17,7 @@ from google.api_core import exceptions from google.cloud import spanner +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect import pytest from test_utils.retry import RetryErrors @@ -48,6 +49,16 @@ def sample_name(): return "snippets" +@pytest.fixture(scope="module") +def spanner_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. + """ + return DatabaseDialect.GOOGLE_STANDARD_SQL + + @pytest.fixture(scope="module") def create_instance_id(): """Id for the low-cost instance.""" From 9a7dc8ff7a8d0cd61655528c65e0d48f8951a1cd Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Sun, 9 Oct 2022 22:44:48 +0000 Subject: [PATCH 10/25] refactor --- samples/samples/conftest.py | 252 ++++++++++++++-------- samples/samples/pg_snippets.py | 92 ++++++---- samples/samples/pg_snippets_test.py | 46 +++-- samples/samples/snippets_test.py | 57 ++++--- 4 files changed, 256 insertions(+), 191 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index f11b3f0bf6..f68705520f 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -32,214 +32,214 @@ @pytest.fixture(scope="module") def sample_name(): - """Sample testcase modules must define this fixture. + """Sample testcase modules must define this fixture. - The name is used to label the instance created by the sample, to - aid in debugging leaked instances. 
+ """ + raise NotImplementedError( + "Define 'sample_name' fixture in sample test driver") @pytest.fixture(scope="module") def spanner_dialect(): - """Spanner dialect to be used for this sample. + """Spanner dialect to be used for this sample. - The dialect is used to initialize the dialect for the database. - It can either be GoogleStandardSql or PostgreSql. - """ - raise NotImplementedError( - "Define 'spanner_dialect' fixture in sample test driver") + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. + """ + raise NotImplementedError( + "Define 'spanner_dialect' fixture in sample test driver") @pytest.fixture(scope="session") def spanner_client(): - """Shared client used across all samples in a session.""" - return client.Client() + """Shared client used across all samples in a session.""" + return client.Client() def scrub_instance_ignore_not_found(to_scrub): - """Helper for func:`cleanup_old_instances`""" - try: - for backup_pb in to_scrub.list_backups(): - backup.Backup.from_pb(backup_pb, to_scrub).delete() + """Helper for func:`cleanup_old_instances`""" + try: + for backup_pb in to_scrub.list_backups(): + backup.Backup.from_pb(backup_pb, to_scrub).delete() - retry_429(to_scrub.delete)() - except exceptions.NotFound: - pass + retry_429(to_scrub.delete)() + except exceptions.NotFound: + pass @pytest.fixture(scope="session") def cleanup_old_instances(spanner_client): - """Delete instances, created by samples, that are older than an hour.""" - cutoff = int(time.time()) - 1 * 60 * 60 - instance_filter = "labels.cloud_spanner_samples:true" + """Delete instances, created by samples, that are older than an hour.""" + cutoff = int(time.time()) - 1 * 60 * 60 + instance_filter = "labels.cloud_spanner_samples:true" - for instance_pb in spanner_client.list_instances(filter_=instance_filter): - inst = instance.Instance.from_pb(instance_pb, spanner_client) + for instance_pb in 
spanner_client.list_instances(filter_=instance_filter): + inst = instance.Instance.from_pb(instance_pb, spanner_client) - if "created" in inst.labels: - create_time = int(inst.labels["created"]) + if "created" in inst.labels: + create_time = int(inst.labels["created"]) - if create_time <= cutoff: - scrub_instance_ignore_not_found(inst) + if create_time <= cutoff: + scrub_instance_ignore_not_found(inst) @pytest.fixture(scope="module") def instance_id(): - """Unique id for the instance used in samples.""" - return f"test-instance-{uuid.uuid4().hex[:10]}" + """Unique id for the instance used in samples.""" + return f"test-instance-{uuid.uuid4().hex[:10]}" @pytest.fixture(scope="module") def multi_region_instance_id(): - """Unique id for the multi-region instance used in samples.""" - return f"multi-instance-{uuid.uuid4().hex[:10]}" + """Unique id for the multi-region instance used in samples.""" + return f"multi-instance-{uuid.uuid4().hex[:10]}" @pytest.fixture(scope="module") def instance_config(spanner_client): - return "{}/instanceConfigs/{}".format( - spanner_client.project_name, "regional-us-central1" - ) + return "{}/instanceConfigs/{}".format( + spanner_client.project_name, "regional-us-central1" + ) @pytest.fixture(scope="module") def multi_region_instance_config(spanner_client): - return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam3") + return "{}/instanceConfigs/{}".format(spanner_client.project_name, "nam3") @pytest.fixture(scope="module") def sample_instance( - spanner_client, - cleanup_old_instances, - instance_id, - instance_config, - sample_name, -): - sample_instance = spanner_client.instance( + spanner_client, + cleanup_old_instances, instance_id, instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": sample_name, - "created": str(int(time.time())), - }, - ) - op = retry_429(sample_instance.create)() - op.result(INSTANCE_CREATION_TIMEOUT) # block until completion + sample_name, +): + sample_instance = 
spanner_client.instance( + instance_id, + instance_config, + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, + ) + op = retry_429(sample_instance.create)() + op.result(INSTANCE_CREATION_TIMEOUT) # block until completion - # Eventual consistency check - retry_found = retry.RetryResult(bool) - retry_found(sample_instance.exists)() + # Eventual consistency check + retry_found = retry.RetryResult(bool) + retry_found(sample_instance.exists)() - yield sample_instance + yield sample_instance - for database_pb in sample_instance.list_databases(): - database.Database.from_pb(database_pb, sample_instance).drop() + for database_pb in sample_instance.list_databases(): + database.Database.from_pb(database_pb, sample_instance).drop() - for backup_pb in sample_instance.list_backups(): - backup.Backup.from_pb(backup_pb, sample_instance).delete() + for backup_pb in sample_instance.list_backups(): + backup.Backup.from_pb(backup_pb, sample_instance).delete() - sample_instance.delete() + sample_instance.delete() @pytest.fixture(scope="module") def multi_region_instance( - spanner_client, - cleanup_old_instances, - multi_region_instance_id, - multi_region_instance_config, - sample_name, -): - multi_region_instance = spanner_client.instance( + spanner_client, + cleanup_old_instances, multi_region_instance_id, multi_region_instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": sample_name, - "created": str(int(time.time())), - }, - ) - op = retry_429(multi_region_instance.create)() - op.result(INSTANCE_CREATION_TIMEOUT) # block until completion + sample_name, +): + multi_region_instance = spanner_client.instance( + multi_region_instance_id, + multi_region_instance_config, + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, + ) + op = retry_429(multi_region_instance.create)() + op.result(INSTANCE_CREATION_TIMEOUT) # block until 
completion - # Eventual consistency check - retry_found = retry.RetryResult(bool) - retry_found(multi_region_instance.exists)() + # Eventual consistency check + retry_found = retry.RetryResult(bool) + retry_found(multi_region_instance.exists)() - yield multi_region_instance + yield multi_region_instance - for database_pb in multi_region_instance.list_databases(): - database.Database.from_pb(database_pb, multi_region_instance).drop() + for database_pb in multi_region_instance.list_databases(): + database.Database.from_pb(database_pb, multi_region_instance).drop() - for backup_pb in multi_region_instance.list_backups(): - backup.Backup.from_pb(backup_pb, multi_region_instance).delete() + for backup_pb in multi_region_instance.list_backups(): + backup.Backup.from_pb(backup_pb, multi_region_instance).delete() - multi_region_instance.delete() + multi_region_instance.delete() @pytest.fixture(scope="module") def database_id(): - """Id for the database used in samples. + """Id for the database used in samples. - Sample testcase modules can override as needed. - """ - return "my-database-id" + Sample testcase modules can override as needed. + """ + return "my-database-id" @pytest.fixture(scope="module") def database_ddl(): - """Sequence of DDL statements used to set up the database. + """Sequence of DDL statements used to set up the database. - Sample testcase modules can override as needed. - """ - return [] + Sample testcase modules can override as needed. 
+ """ + return [] @pytest.fixture(scope="module") def sample_database( - sample_instance, - database_id, - database_ddl, - database_dialect): - if database_dialect == DatabaseDialect.POSTGRESQL: - sample_database = sample_instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, - ) + sample_instance, + database_id, + database_ddl, + database_dialect): + if database_dialect == DatabaseDialect.POSTGRESQL: + sample_database = sample_instance.database( + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, + ) - if not sample_database.exists(): - sample_database.create() + if not sample_database.exists(): + sample_database.create() - request = sample_instance.spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=sample_database.name, - statements=database_ddl, - ) + request = sample_instance.spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database=sample_database.name, + statements=database_ddl, + ) - sample_instance.database_admin_api.update_database_ddl(request) - yield sample_database + sample_instance.database_admin_api.update_database_ddl(request) + yield sample_database - sample_database.drop() + sample_database.drop() - sample_database = sample_instance.database( - database_id, - ddl_statements=database_ddl, - ) + sample_database = sample_instance.database( + database_id, + ddl_statements=database_ddl, + ) - if not sample_database.exists(): - sample_database.create() + if not sample_database.exists(): + sample_database.create() - yield sample_database + yield sample_database - sample_database.drop() + sample_database.drop() @pytest.fixture(scope="module") def kms_key_name(spanner_client): - return "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - spanner_client.project, - "us-central1", - "spanner-test-keyring", - "spanner-test-cmek", - ) + return "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + spanner_client.project, + "us-central1", + "spanner-test-keyring", + "spanner-test-cmek", + ) 
diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 518f206f7b..bd2380f05b 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -215,7 +215,8 @@ def read_data(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) for row in results: @@ -306,7 +307,8 @@ def update_albums(transaction): if second_album_budget < transfer_amount: # Raising an exception will automatically roll back the # transaction. - raise ValueError("The second album doesn't have enough funds to transfer") + raise ValueError( + "The second album doesn't have enough funds to transfer") # Read the first album's budget. first_album_keyset = spanner.KeySet(keys=[(1, 1)]) @@ -468,7 +470,8 @@ def read_data_with_storing_index(instance_id, database_id): ) for row in results: - print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format( + *row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -500,7 +503,8 @@ def read_only_transaction(instance_id, database_id): # return the same data. 
keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) print("Results from second read:") @@ -668,7 +672,8 @@ def update_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.update( table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), + columns=( + "SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), values=[ (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), (2, 2, 750000, spanner.COMMIT_TIMESTAMP), @@ -690,7 +695,8 @@ def add_timestamp_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] + [ + "ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] ) print("Waiting for operation to complete...") @@ -785,7 +791,8 @@ def insert_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.insert( table="Performances", - columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), + columns=( + "SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), values=[ (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), @@ -921,7 +928,8 @@ def delete_data_with_partitioned_dml(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") + row_ct = database.execute_partitioned_dml( + "DELETE FROM Singers WHERE SingerId > 10") print("{} record(s) deleted.".format(row_ct)) # [END spanner_postgresql_dml_partitioned_delete] @@ -952,7 +960,8 @@ def update_with_batch_dml(instance_id, database_id): ) def update_albums(transaction): - status, row_cts = 
transaction.batch_update([insert_statement, update_statement]) + status, row_cts = transaction.batch_update( + [insert_statement, update_statement]) if status.code != OK: # Do handling here. @@ -960,7 +969,8 @@ def update_albums(transaction): # `commit` is called by `run_in_transaction`. return - print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) + print( + "Executed {} SQL statements using Batch DML.".format(len(row_cts))) database.run_in_transaction(update_albums) # [END spanner_postgresql_dml_batch_update] @@ -1139,7 +1149,8 @@ def query_data_with_float(instance_id, database_id): ) for row in results: - print("VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + print( + "VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_postgresql_query_with_float_parameter] @@ -1207,8 +1218,9 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # Avoid time drift on the local machine. # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. 
example_timestamp = ( - datetime.datetime.utcnow() + datetime.timedelta(days=1) - ).isoformat() + "Z" + datetime.datetime.utcnow() + datetime.timedelta( + days=1) + ).isoformat() + "Z" # [START spanner_postgresql_query_with_timestamp_parameter] param = {"p1": example_timestamp} param_type = {"p1": param_types.TIMESTAMP} @@ -1300,11 +1312,13 @@ def query_data_with_query_options(instance_id, database_id): if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + "--database-id", help="Your Cloud Spanner database ID.", + default="example_db" ) subparsers = parser.add_subparsers(dest="command") @@ -1320,28 +1334,36 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser( "query_data_with_new_column", help=query_data_with_new_column.__doc__ ) - subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) - subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) + subparsers.add_parser("read_write_transaction", + help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", + help=read_only_transaction.__doc__) subparsers.add_parser("add_index", help=add_index.__doc__) - subparsers.add_parser("read_data_with_index", help=read_data_with_index.__doc__) + subparsers.add_parser("read_data_with_index", + help=read_data_with_index.__doc__) subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) - subparsers.add_parser("read_data_with_storing_index", help=read_data_with_storing_index.__doc__) + subparsers.add_parser("read_data_with_storing_index", + help=read_data_with_storing_index.__doc__) 
subparsers.add_parser( "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) subparsers.add_parser( "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ ) - subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) + subparsers.add_parser("add_timestamp_column", + help=add_timestamp_column.__doc__) subparsers.add_parser( "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ ) subparsers.add_parser( "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ ) - subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) - subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) - subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) + subparsers.add_parser("insert_data_with_dml", + help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", + help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", + help=delete_data_with_dml.__doc__) subparsers.add_parser( "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ ) @@ -1360,16 +1382,23 @@ def query_data_with_query_options(instance_id, database_id): "delete_data_with_partitioned_dml", help=delete_data_with_partitioned_dml.__doc__, ) - subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) + subparsers.add_parser("update_with_batch_dml", + help=update_with_batch_dml.__doc__) subparsers.add_parser( "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ ) - subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) - subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) - subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) - subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) - subparsers.add_parser("query_data_with_int", 
help=query_data_with_int.__doc__) - subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) + subparsers.add_parser("insert_datatypes_data", + help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_bool", + help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", + help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_float", + help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", + help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", + help=query_data_with_string.__doc__) subparsers.add_parser( "query_data_with_timestamp_parameter", help=query_data_with_timestamp_parameter.__doc__, @@ -1379,7 +1408,8 @@ def query_data_with_query_options(instance_id, database_id): help=query_data_with_numeric_parameter.__doc__, ) subparsers.add_parser( - "query_data_with_query_options", help=query_data_with_query_options.__doc__ + "query_data_with_query_options", + help=query_data_with_query_options.__doc__ ) subparsers.add_parser( "create_client_with_query_options", diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 0ccad622f8..022bf50317 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -90,6 +90,7 @@ def cmek_database_id(): def default_leader_database_id(): return f"leader_db_{uuid.uuid4().hex[:10]}" + @pytest.fixture(scope="module") def database_ddl(): """Sequence of DDL statements used to set up the database. 
@@ -187,7 +188,8 @@ def test_read_write_transaction(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_query_data_with_new_column(capsys, instance_id, sample_database): - snippets.query_data_with_new_column(instance_id, sample_database.database_id) + snippets.query_data_with_new_column(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out @@ -218,7 +220,8 @@ def test_add_storing_index(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_storing_index"]) def test_read_data_with_storing_index(capsys, instance_id, sample_database): - snippets.read_data_with_storing_index(instance_id, sample_database.database_id) + snippets.read_data_with_storing_index(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "300000" in out @@ -240,7 +243,8 @@ def test_add_timestamp_column(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_timestamp_column"]) def test_update_data_with_timestamp(capsys, instance_id, sample_database): - snippets.update_data_with_timestamp(instance_id, sample_database.database_id) + snippets.update_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Updated data" in out @@ -255,14 +259,16 @@ def test_query_data_with_timestamp(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_timestamp") def test_create_table_with_timestamp(capsys, instance_id, sample_database): - snippets.create_table_with_timestamp(instance_id, sample_database.database_id) + snippets.create_table_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Performances table on database" in out @pytest.mark.dependency(depends=["create_table_with_datatypes"]) def 
test_insert_data_with_timestamp(capsys, instance_id, sample_database): - snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) + snippets.insert_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Inserted data." in out @@ -290,7 +296,8 @@ def test_delete_data_with_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="dml_write_read_transaction") def test_dml_write_read_transaction(capsys, instance_id, sample_database): - snippets.dml_write_read_transaction(instance_id, sample_database.database_id) + snippets.dml_write_read_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "1 record(s) inserted." in out assert "FirstName: Timothy, LastName: Campbell" in out @@ -312,21 +319,24 @@ def test_query_data_with_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_write_with_dml_transaction(capsys, instance_id, sample_database): - snippets.write_with_dml_transaction(instance_id, sample_database.database_id) + snippets.write_with_dml_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Transferred 200000 from Album2's budget to Album1's" in out @pytest.mark.dependency(depends=["add_column"]) def update_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id) + snippets.update_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "3 record(s) updated" in out @pytest.mark.dependency(depends=["insert_with_dml"]) def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) + snippets.delete_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "6 record(s) 
deleted" in out @@ -340,7 +350,8 @@ def test_update_with_batch_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_datatypes") def test_create_table_with_datatypes(capsys, instance_id, sample_database): - snippets.create_table_with_datatypes(instance_id, sample_database.database_id) + snippets.create_table_with_datatypes(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Venues table on database" in out @@ -393,14 +404,17 @@ def test_query_data_with_string(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) -def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): - snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) +def test_query_data_with_numeric_parameter(capsys, instance_id, + sample_database): + snippets.query_data_with_numeric_parameter(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, Revenue: 35000" in out @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): +def test_query_data_with_timestamp_parameter(capsys, instance_id, + sample_database): snippets.query_data_with_timestamp_parameter( instance_id, sample_database.database_id ) @@ -412,7 +426,8 @@ def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_databas @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_query_options(capsys, instance_id, sample_database): - snippets.query_data_with_query_options(instance_id, sample_database.database_id) + snippets.query_data_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out @@ -421,7 +436,8 @@ def 
test_query_data_with_query_options(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_create_client_with_query_options(capsys, instance_id, sample_database): - snippets.create_client_with_query_options(instance_id, sample_database.database_id) + snippets.create_client_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 376ef50f87..0615e4c372 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -335,7 +335,8 @@ def test_read_write_transaction(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_query_data_with_new_column(capsys, instance_id, sample_database): - snippets.query_data_with_new_column(instance_id, sample_database.database_id) + snippets.query_data_with_new_column(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out @@ -375,7 +376,8 @@ def test_add_storing_index(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_storing_index"]) def test_read_data_with_storing_index(capsys, instance_id, sample_database): - snippets.read_data_with_storing_index(instance_id, sample_database.database_id) + snippets.read_data_with_storing_index(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "300000" in out @@ -397,7 +399,8 @@ def test_add_timestamp_column(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_timestamp_column"]) def test_update_data_with_timestamp(capsys, instance_id, sample_database): - snippets.update_data_with_timestamp(instance_id, 
sample_database.database_id) + snippets.update_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Updated data" in out @@ -412,14 +415,16 @@ def test_query_data_with_timestamp(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_timestamp") def test_create_table_with_timestamp(capsys, instance_id, sample_database): - snippets.create_table_with_timestamp(instance_id, sample_database.database_id) + snippets.create_table_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Performances table on database" in out @pytest.mark.dependency(depends=["create_table_with_datatypes"]) def test_insert_data_with_timestamp(capsys, instance_id, sample_database): - snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) + snippets.insert_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Inserted data." in out @@ -440,7 +445,8 @@ def test_query_with_struct(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["write_struct_data"]) def test_query_with_array_of_struct(capsys, instance_id, sample_database): - snippets.query_with_array_of_struct(instance_id, sample_database.database_id) + snippets.query_with_array_of_struct(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "SingerId: 8" in out assert "SingerId: 7" in out @@ -493,14 +499,16 @@ def test_delete_data_with_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_timestamp_column"]) def test_update_data_with_dml_timestamp(capsys, instance_id, sample_database): - snippets.update_data_with_dml_timestamp(instance_id, sample_database.database_id) + snippets.update_data_with_dml_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "2 record(s) updated." 
in out @pytest.mark.dependency(name="dml_write_read_transaction") def test_dml_write_read_transaction(capsys, instance_id, sample_database): - snippets.dml_write_read_transaction(instance_id, sample_database.database_id) + snippets.dml_write_read_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "1 record(s) inserted." in out assert "FirstName: Timothy, LastName: Campbell" in out @@ -508,7 +516,8 @@ def test_dml_write_read_transaction(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["dml_write_read_transaction"]) def test_update_data_with_dml_struct(capsys, instance_id, sample_database): - snippets.update_data_with_dml_struct(instance_id, sample_database.database_id) + snippets.update_data_with_dml_struct(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "1 record(s) updated" in out @@ -529,21 +538,24 @@ def test_query_data_with_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_write_with_dml_transaction(capsys, instance_id, sample_database): - snippets.write_with_dml_transaction(instance_id, sample_database.database_id) + snippets.write_with_dml_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Transferred 200000 from Album2's budget to Album1's" in out @pytest.mark.dependency(depends=["add_column"]) def update_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id) + snippets.update_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "3 record(s) updated" in out @pytest.mark.dependency(depends=["insert_with_dml"]) def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) + 
snippets.delete_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "6 record(s) deleted" in out @@ -557,7 +569,8 @@ def test_update_with_batch_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_datatypes") def test_create_table_with_datatypes(capsys, instance_id, sample_database): - snippets.create_table_with_datatypes(instance_id, sample_database.database_id) + snippets.create_table_with_datatypes(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Venues table on database" in out @@ -643,8 +656,10 @@ def test_update_data_with_numeric(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) -def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): - snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) +def test_query_data_with_numeric_parameter(capsys, instance_id, + sample_database): + snippets.query_data_with_numeric_parameter(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, Revenue: 35000" in out @@ -668,13 +683,15 @@ def test_update_data_with_json(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_json_column"]) def test_query_data_with_json_parameter(capsys, instance_id, sample_database): - snippets.query_data_with_json_parameter(instance_id, sample_database.database_id) + snippets.query_data_with_json_parameter(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 19, VenueDetails: {'open': True, 'rating': 9}" in out @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): +def test_query_data_with_timestamp_parameter(capsys, instance_id, + sample_database): snippets.query_data_with_timestamp_parameter( instance_id, sample_database.database_id ) 
@@ -686,7 +703,8 @@ def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_databas @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_query_options(capsys, instance_id, sample_database): - snippets.query_data_with_query_options(instance_id, sample_database.database_id) + snippets.query_data_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out @@ -695,7 +713,8 @@ def test_query_data_with_query_options(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_create_client_with_query_options(capsys, instance_id, sample_database): - snippets.create_client_with_query_options(instance_id, sample_database.database_id) + snippets.create_client_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out From 921f8c9abfe13e6593f4885a235138940d01e7ab Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Sun, 9 Oct 2022 22:54:57 +0000 Subject: [PATCH 11/25] refactor --- samples/samples/conftest.py | 82 ++-- samples/samples/pg_snippets.py | 570 ++++++++++++++-------------- samples/samples/pg_snippets_test.py | 10 +- samples/samples/snippets_test.py | 76 ++-- 4 files changed, 369 insertions(+), 369 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index f68705520f..d3c4c58b8e 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -38,7 +38,7 @@ def sample_name(): aid in debugging leaked instances. 
""" raise NotImplementedError( - "Define 'sample_name' fixture in sample test driver") + "Define 'sample_name' fixture in sample test driver") @pytest.fixture(scope="module") @@ -49,7 +49,7 @@ def spanner_dialect(): It can either be GoogleStandardSql or PostgreSql. """ raise NotImplementedError( - "Define 'spanner_dialect' fixture in sample test driver") + "Define 'spanner_dialect' fixture in sample test driver") @pytest.fixture(scope="session") @@ -100,7 +100,7 @@ def multi_region_instance_id(): @pytest.fixture(scope="module") def instance_config(spanner_client): return "{}/instanceConfigs/{}".format( - spanner_client.project_name, "regional-us-central1" + spanner_client.project_name, "regional-us-central1" ) @@ -111,20 +111,20 @@ def multi_region_instance_config(spanner_client): @pytest.fixture(scope="module") def sample_instance( - spanner_client, - cleanup_old_instances, - instance_id, - instance_config, - sample_name, + spanner_client, + cleanup_old_instances, + instance_id, + instance_config, + sample_name, ): sample_instance = spanner_client.instance( - instance_id, - instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": sample_name, - "created": str(int(time.time())), - }, + instance_id, + instance_config, + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, ) op = retry_429(sample_instance.create)() op.result(INSTANCE_CREATION_TIMEOUT) # block until completion @@ -146,20 +146,20 @@ def sample_instance( @pytest.fixture(scope="module") def multi_region_instance( - spanner_client, - cleanup_old_instances, - multi_region_instance_id, - multi_region_instance_config, - sample_name, + spanner_client, + cleanup_old_instances, + multi_region_instance_id, + multi_region_instance_config, + sample_name, ): multi_region_instance = spanner_client.instance( - multi_region_instance_id, - multi_region_instance_config, - labels={ - "cloud_spanner_samples": "true", - "sample_name": 
sample_name, - "created": str(int(time.time())), - }, + multi_region_instance_id, + multi_region_instance_config, + labels={ + "cloud_spanner_samples": "true", + "sample_name": sample_name, + "created": str(int(time.time())), + }, ) op = retry_429(multi_region_instance.create)() op.result(INSTANCE_CREATION_TIMEOUT) # block until completion @@ -199,22 +199,22 @@ def database_ddl(): @pytest.fixture(scope="module") def sample_database( - sample_instance, - database_id, - database_ddl, - database_dialect): + sample_instance, + database_id, + database_ddl, + database_dialect): if database_dialect == DatabaseDialect.POSTGRESQL: sample_database = sample_instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, ) if not sample_database.exists(): sample_database.create() request = sample_instance.spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=sample_database.name, - statements=database_ddl, + database=sample_database.name, + statements=database_ddl, ) sample_instance.database_admin_api.update_database_ddl(request) @@ -223,8 +223,8 @@ def sample_database( sample_database.drop() sample_database = sample_instance.database( - database_id, - ddl_statements=database_ddl, + database_id, + ddl_statements=database_ddl, ) if not sample_database.exists(): @@ -238,8 +238,8 @@ def sample_database( @pytest.fixture(scope="module") def kms_key_name(spanner_client): return "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - spanner_client.project, - "us-central1", - "spanner-test-keyring", - "spanner-test-cmek", + spanner_client.project, + "us-central1", + "spanner-test-keyring", + "spanner-test-cmek", ) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index bd2380f05b..a93a1b12ba 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -38,19 +38,19 @@ def create_instance(instance_id): spanner_client = spanner.Client() 
config_name = "{}/instanceConfigs/regional-us-central1".format( - spanner_client.project_name + spanner_client.project_name ) instance = spanner_client.instance( - instance_id, - configuration_name=config_name, - display_name="This is a display name.", - node_count=1, - labels={ - "cloud_spanner_samples": "true", - "sample_name": "snippets-create_instance-explicit", - "created": str(int(time.time())), - }, + instance_id, + configuration_name=config_name, + display_name="This is a display name.", + node_count=1, + labels={ + "cloud_spanner_samples": "true", + "sample_name": "snippets-create_instance-explicit", + "created": str(int(time.time())), + }, ) operation = instance.create() @@ -71,8 +71,8 @@ def create_database(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, ) operation = database.create() @@ -87,22 +87,22 @@ def create_database(instance_id, database_id): def create_table_using_ddl(database_name): spanner_client = spanner.Client() request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database_name, - statements=[ - """CREATE TABLE Singers ( - SingerId bigint NOT NULL, - FirstName character varying(1024), - LastName character varying(1024), - SingerInfo bytea, - PRIMARY KEY (SingerId) - )""", - """CREATE TABLE Albums ( - SingerId bigint NOT NULL, - AlbumId bigint NOT NULL, - AlbumTitle character varying(1024), - PRIMARY KEY (SingerId, AlbumId) - ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", - ], + database=database_name, + statements=[ + """CREATE TABLE Singers ( + SingerId bigint NOT NULL, + FirstName character varying(1024), + LastName character varying(1024), + SingerInfo bytea, + PRIMARY KEY (SingerId) + )""", + """CREATE TABLE Albums ( + SingerId bigint NOT NULL, + AlbumId bigint NOT NULL, + AlbumTitle character varying(1024), + PRIMARY KEY (SingerId, 
AlbumId) + ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) operation.result(OPERATION_TIMEOUT_SECONDS) @@ -124,27 +124,27 @@ def insert_data(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Singers", - columns=("SingerId", "FirstName", "LastName"), - values=[ - (1, "Marc", "Richards"), - (2, "Catalina", "Smith"), - (3, "Alice", "Trentor"), - (4, "Lea", "Martin"), - (5, "David", "Lomond"), - ], + table="Singers", + columns=("SingerId", "FirstName", "LastName"), + values=[ + (1, "Marc", "Richards"), + (2, "Catalina", "Smith"), + (3, "Alice", "Trentor"), + (4, "Lea", "Martin"), + (5, "David", "Lomond"), + ], ) batch.insert( - table="Albums", - columns=("SingerId", "AlbumId", "AlbumTitle"), - values=[ - (1, 1, "Total Junk"), - (1, 2, "Go, Go, Go"), - (2, 1, "Green"), - (2, 2, "Forever Hold Your Peace"), - (2, 3, "Terrified"), - ], + table="Albums", + columns=("SingerId", "AlbumId", "AlbumTitle"), + values=[ + (1, 1, "Total Junk"), + (1, 2, "Go, Go, Go"), + (2, 1, "Green"), + (2, 2, "Forever Hold Your Peace"), + (2, 3, "Terrified"), + ], ) print("Inserted data.") @@ -195,7 +195,7 @@ def query_data(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) for row in results: @@ -215,8 +215,8 @@ def read_data(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), - keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) for row in results: @@ -234,7 +234,7 @@ def add_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] + 
["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] ) print("Waiting for operation to complete...") @@ -263,9 +263,9 @@ def update_data(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, 100000), (2, 2, 500000)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, 100000), (2, 2, 500000)], ) print("Updated data.") @@ -294,10 +294,10 @@ def update_albums(transaction): # Read the second album budget. second_album_keyset = spanner.KeySet(keys=[(2, 2)]) second_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=second_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=second_album_keyset, + limit=1, ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -308,15 +308,15 @@ def update_albums(transaction): # Raising an exception will automatically roll back the # transaction. raise ValueError( - "The second album doesn't have enough funds to transfer") + "The second album doesn't have enough funds to transfer") # Read the first album's budget. first_album_keyset = spanner.KeySet(keys=[(1, 1)]) first_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=first_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=first_album_keyset, + limit=1, ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -325,15 +325,15 @@ def update_albums(transaction): second_album_budget -= transfer_amount first_album_budget += transfer_amount print( - "Setting first album's budget to {} and the second album's " - "budget to {}.".format(first_album_budget, second_album_budget) + "Setting first album's budget to {} and the second album's " + "budget to {}.".format(first_album_budget, second_album_budget) ) # Update the rows. 
transaction.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], ) database.run_in_transaction(update_albums) @@ -360,7 +360,7 @@ def query_data_with_new_column(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" ) for row in results: @@ -378,7 +378,7 @@ def add_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] + ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] ) print("Waiting for operation to complete...") @@ -407,10 +407,10 @@ def read_data_with_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle"), - keyset=keyset, - index="AlbumsByAlbumTitle", + table="Albums", + columns=("AlbumId", "AlbumTitle"), + keyset=keyset, + index="AlbumsByAlbumTitle", ) for row in results: @@ -428,10 +428,10 @@ def add_storing_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - [ - "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" - "INCLUDE (MarketingBudget)" - ] + [ + "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" + "INCLUDE (MarketingBudget)" + ] ) print("Waiting for operation to complete...") @@ -463,15 +463,15 @@ def read_data_with_storing_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle", "MarketingBudget"), - keyset=keyset, - 
index="AlbumsByAlbumTitle2", + table="Albums", + columns=("AlbumId", "AlbumTitle", "MarketingBudget"), + keyset=keyset, + index="AlbumsByAlbumTitle2", ) for row in results: print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format( - *row)) + *row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -491,7 +491,7 @@ def read_only_transaction(instance_id, database_id): with database.snapshot(multi_use=True) as snapshot: # Read using SQL. results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) print("Results from first read:") @@ -503,8 +503,8 @@ def read_only_transaction(instance_id, database_id): # return the same data. keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), - keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) print("Results from second read:") @@ -526,11 +526,11 @@ def insert_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " - "(12, 'Melissa', 'Garcia'), " - "(13, 'Russell', 'Morales'), " - "(14, 'Jacqueline', 'Long'), " - "(15, 'Dylan', 'Shaw')" + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " + "(12, 'Melissa', 'Garcia'), " + "(13, 'Russell', 'Morales'), " + "(14, 'Jacqueline', 'Long'), " + "(15, 'Dylan', 'Shaw')" ) print("{} record(s) inserted.".format(row_ct)) @@ -549,9 +549,9 @@ def query_data_with_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", - params={"p1": "Garcia"}, - param_types={"p1": spanner.param_types.STRING}, + "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", + params={"p1": "Garcia"}, + param_types={"p1": 
spanner.param_types.STRING}, ) for row in results: @@ -573,7 +573,7 @@ def transfer_budget(transaction): # Transfer marketing budget from one album to another. Performed in a # single transaction to ensure that the transfer is atomic. second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -585,8 +585,8 @@ def transfer_budget(transaction): # will be rerun by the client library if second_album_budget >= transfer_amount: first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" + "SELECT MarketingBudget from Albums " + "WHERE SingerId = 1 and AlbumId = 1" ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -596,26 +596,26 @@ def transfer_budget(transaction): # Update first album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 1 and AlbumId = 1", - params={"p1": first_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 1 and AlbumId = 1", + params={"p1": first_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) # Update second album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 2 and AlbumId = 2", - params={"p1": second_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 2 and AlbumId = 2", + params={"p1": second_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) print( - "Transferred {} from Album2's budget to Album1's".format( - transfer_amount - ) + "Transferred {} from Album2's budget to Album1's".format( + transfer_amount + ) ) 
database.run_in_transaction(transfer_budget) @@ -636,9 +636,9 @@ def read_stale_data(instance_id, database_id): with database.snapshot(exact_staleness=staleness) as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - keyset=keyset, + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + keyset=keyset, ) for row in results: @@ -671,13 +671,13 @@ def update_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=( + table="Albums", + columns=( "SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), - values=[ - (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), - (2, 2, 750000, spanner.COMMIT_TIMESTAMP), - ], + values=[ + (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), + (2, 2, 750000, spanner.COMMIT_TIMESTAMP), + ], ) print("Updated data.") @@ -695,17 +695,17 @@ def add_timestamp_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - [ - "ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] + [ + "ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] ) print("Waiting for operation to complete...") operation.result(OPERATION_TIMEOUT_SECONDS) print( - 'Altered table "Albums" on database {} on instance {}.'.format( - database_id, instance_id - ) + 'Altered table "Albums" on database {} on instance {}.'.format( + database_id, instance_id + ) ) @@ -732,8 +732,8 @@ def query_data_with_timestamp(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " - "ORDER BY LastUpdateTime DESC" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " + "ORDER BY LastUpdateTime DESC" ) for row in results: @@ -752,17 +752,17 @@ def create_table_with_timestamp(instance_id, database_id): database = 
instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Performances ( - SingerId BIGINT NOT NULL, - VenueId BIGINT NOT NULL, - EventDate Date, - Revenue BIGINT, - LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, - PRIMARY KEY (SingerId, VenueId, EventDate)) - INTERLEAVE IN PARENT Singers ON DELETE CASCADE""" - ], + database=database.name, + statements=[ + """CREATE TABLE Performances ( + SingerId BIGINT NOT NULL, + VenueId BIGINT NOT NULL, + EventDate Date, + Revenue BIGINT, + LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, +PRIMARY KEY (SingerId, VenueId, EventDate)) +INTERLEAVE IN PARENT Singers ON DELETE CASCADE""" + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -770,9 +770,9 @@ def create_table_with_timestamp(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Performances table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Performances table on database {} on instance {}".format( + database_id, instance_id + ) ) @@ -790,14 +790,14 @@ def insert_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Performances", - columns=( + table="Performances", + columns=( "SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), - values=[ - (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), - (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), - (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), - ], + values=[ + (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), + (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), + (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), + ], ) print("Inserted data.") @@ -818,8 +818,8 @@ def insert_data_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, 
LastName) " - " VALUES (10, 'Virginia', 'Watson')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (10, 'Virginia', 'Watson')" ) print("{} record(s) inserted.".format(row_ct)) @@ -840,9 +840,9 @@ def update_data_with_dml(instance_id, database_id): def update_albums(transaction): row_ct = transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 1" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 1" ) print("{} record(s) updated.".format(row_ct)) @@ -863,7 +863,7 @@ def delete_data_with_dml(instance_id, database_id): def delete_singers(transaction): row_ct = transaction.execute_update( - "DELETE FROM Singers WHERE FirstName = 'Alice'" + "DELETE FROM Singers WHERE FirstName = 'Alice'" ) print("{} record(s) deleted.".format(row_ct)) @@ -885,14 +885,14 @@ def dml_write_read_transaction(instance_id, database_id): def write_then_read(transaction): # Insert record. row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (11, 'Timothy', 'Campbell')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (11, 'Timothy', 'Campbell')" ) print("{} record(s) inserted.".format(row_ct)) # Read newly inserted record. 
results = transaction.execute_sql( - "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" + "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" ) for result in results: print("FirstName: {}, LastName: {}".format(*result)) @@ -912,7 +912,7 @@ def update_data_with_partitioned_dml(instance_id, database_id): database = instance.database(database_id) row_ct = database.execute_partitioned_dml( - "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" + "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" ) print("{} records updated.".format(row_ct)) @@ -929,7 +929,7 @@ def delete_data_with_partitioned_dml(instance_id, database_id): database = instance.database(database_id) row_ct = database.execute_partitioned_dml( - "DELETE FROM Singers WHERE SingerId > 10") + "DELETE FROM Singers WHERE SingerId > 10") print("{} record(s) deleted.".format(row_ct)) # [END spanner_postgresql_dml_partitioned_delete] @@ -948,20 +948,20 @@ def update_with_batch_dml(instance_id, database_id): database = instance.database(database_id) insert_statement = ( - "INSERT INTO Albums " - "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " - "VALUES (1, 3, 'Test Album Title', 10000)" + "INSERT INTO Albums " + "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " + "VALUES (1, 3, 'Test Album Title', 10000)" ) update_statement = ( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 3" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 3" ) def update_albums(transaction): status, row_cts = transaction.batch_update( - [insert_statement, update_statement]) + [insert_statement, update_statement]) if status.code != OK: # Do handling here. 
@@ -970,7 +970,7 @@ def update_albums(transaction): return print( - "Executed {} SQL statements using Batch DML.".format(len(row_cts))) + "Executed {} SQL statements using Batch DML.".format(len(row_cts))) database.run_in_transaction(update_albums) # [END spanner_postgresql_dml_batch_update] @@ -986,19 +986,19 @@ def create_table_with_datatypes(instance_id, database_id): database = instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Venues ( - VenueId BIGINT NOT NULL, - VenueName character varying(100), - VenueInfo BYTEA, - Capacity BIGINT, - OutdoorVenue BOOL, - PopularityScore FLOAT8, - Revenue NUMERIC, - LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, - PRIMARY KEY (VenueId))""" - ], + database=database.name, + statements=[ + """CREATE TABLE Venues ( + VenueId BIGINT NOT NULL, + VenueName character varying(100), + VenueInfo BYTEA, + Capacity BIGINT, + OutdoorVenue BOOL, + PopularityScore FLOAT8, + Revenue NUMERIC, + LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, + PRIMARY KEY (VenueId))""" + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -1006,9 +1006,9 @@ def create_table_with_datatypes(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Venues table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) ) # [END spanner_postgresql_create_table_with_datatypes] @@ -1027,49 +1027,49 @@ def insert_datatypes_data(instance_id, database_id): exampleBytes3 = base64.b64encode("Hello World 3".encode()) with database.batch() as batch: batch.insert( - table="Venues", - columns=( - "VenueId", - "VenueName", - "VenueInfo", - "Capacity", - "OutdoorVenue", - "PopularityScore", - "Revenue", - "LastUpdateTime", + table="Venues", + columns=( + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", 
+ "OutdoorVenue", + "PopularityScore", + "Revenue", + "LastUpdateTime", + ), + values=[ + ( + 4, + "Venue 4", + exampleBytes1, + 1800, + False, + 0.85543, + decimal.Decimal("215100.10"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 19, + "Venue 19", + exampleBytes2, + 6300, + True, + 0.98716, + decimal.Decimal("1200100.00"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + "Venue 42", + exampleBytes3, + 3000, + False, + 0.72598, + decimal.Decimal("390650.99"), + spanner.COMMIT_TIMESTAMP, ), - values=[ - ( - 4, - "Venue 4", - exampleBytes1, - 1800, - False, - 0.85543, - decimal.Decimal("215100.10"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 19, - "Venue 19", - exampleBytes2, - 6300, - True, - 0.98716, - decimal.Decimal("1200100.00"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 42, - "Venue 42", - exampleBytes3, - 3000, - False, - 0.72598, - decimal.Decimal("390650.99"), - spanner.COMMIT_TIMESTAMP, - ), - ], + ], ) print("Inserted data.") @@ -1091,10 +1091,10 @@ def query_data_with_bool(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " - "WHERE OutdoorVenue = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE OutdoorVenue = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1117,9 +1117,9 @@ def query_data_with_bytes(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1142,15 +1142,15 @@ def query_data_with_float(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, PopularityScore FROM Venues " - "WHERE PopularityScore > $1", - params=param, - 
param_types=param_type, + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > $1", + params=param, + param_types=param_type, ) for row in results: print( - "VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + "VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_postgresql_query_with_float_parameter] @@ -1169,9 +1169,9 @@ def query_data_with_int(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", + params=param, + param_types=param_type, ) for row in results: @@ -1194,9 +1194,9 @@ def query_data_with_string(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1218,8 +1218,8 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # Avoid time drift on the local machine. # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. 
example_timestamp = ( - datetime.datetime.utcnow() + datetime.timedelta( - days=1) + datetime.datetime.utcnow() + datetime.timedelta( + days=1) ).isoformat() + "Z" # [START spanner_postgresql_query_with_timestamp_parameter] param = {"p1": example_timestamp} @@ -1227,10 +1227,10 @@ def query_data_with_timestamp_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " - "WHERE LastUpdateTime < $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1253,9 +1253,9 @@ def query_data_with_numeric_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", - params=param, - param_types=param_type, + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1269,17 +1269,17 @@ def create_client_with_query_options(instance_id, database_id): # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" spanner_client = spanner.Client( - query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - } + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + } ) instance = spanner_client.instance(instance_id) database = instance.database(database_id) with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" ) for row in results: @@ -1298,11 +1298,11 @@ def query_data_with_query_options(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", - 
query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - }, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + }, ) for row in results: @@ -1312,13 +1312,13 @@ def query_data_with_query_options(instance_id, database_id): if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", - default="example_db" + "--database-id", help="Your Cloud Spanner database ID.", + default="example_db" ) subparsers = parser.add_subparsers(dest="command") @@ -1332,7 +1332,7 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("add_column", help=add_column.__doc__) subparsers.add_parser("update_data", help=update_data.__doc__) subparsers.add_parser( - "query_data_with_new_column", help=query_data_with_new_column.__doc__ + "query_data_with_new_column", help=query_data_with_new_column.__doc__ ) subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) @@ -1345,18 +1345,18 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("read_data_with_storing_index", help=read_data_with_storing_index.__doc__) subparsers.add_parser( - "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) subparsers.add_parser( - "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ ) subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) subparsers.add_parser( - 
"update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ ) subparsers.add_parser( - "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ ) subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) @@ -1365,27 +1365,27 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) subparsers.add_parser( - "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ ) subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) subparsers.add_parser( - "query_data_with_parameter", help=query_data_with_parameter.__doc__ + "query_data_with_parameter", help=query_data_with_parameter.__doc__ ) subparsers.add_parser( - "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ ) subparsers.add_parser( - "update_data_with_partitioned_dml", - help=update_data_with_partitioned_dml.__doc__, + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, ) subparsers.add_parser( - "delete_data_with_partitioned_dml", - help=delete_data_with_partitioned_dml.__doc__, + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, ) subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) subparsers.add_parser( - "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ ) subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) @@ -1400,20 +1400,20 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("query_data_with_string", 
help=query_data_with_string.__doc__) subparsers.add_parser( - "query_data_with_timestamp_parameter", - help=query_data_with_timestamp_parameter.__doc__, + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, ) subparsers.add_parser( - "query_data_with_numeric_parameter", - help=query_data_with_numeric_parameter.__doc__, + "query_data_with_numeric_parameter", + help=query_data_with_numeric_parameter.__doc__, ) subparsers.add_parser( - "query_data_with_query_options", - help=query_data_with_query_options.__doc__ + "query_data_with_query_options", + help=query_data_with_query_options.__doc__ ) subparsers.add_parser( - "create_client_with_query_options", - help=create_client_with_query_options.__doc__, + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, ) args = parser.parse_args() diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 022bf50317..c995fdfa63 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -357,8 +357,8 @@ def test_create_table_with_datatypes(capsys, instance_id, sample_database): @pytest.mark.dependency( - name="insert_datatypes_data", - depends=["create_table_with_datatypes"], + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], ) def test_insert_datatypes_data(capsys, instance_id, sample_database): snippets.insert_datatypes_data(instance_id, sample_database.database_id) @@ -405,7 +405,7 @@ def test_query_data_with_string(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) out, _ = capsys.readouterr() @@ -414,9 +414,9 @@ def test_query_data_with_numeric_parameter(capsys, instance_id, @pytest.mark.dependency(depends=["insert_datatypes_data"]) def 
test_query_data_with_timestamp_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_timestamp_parameter( - instance_id, sample_database.database_id + instance_id, sample_database.database_id ) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 0615e4c372..93b9ccdddc 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -56,7 +56,7 @@ def spanner_dialect(): The dialect is used to initialize the dialect for the database. It can either be GoogleStandardSql or PostgreSql. """ - return DatabaseDialect. + return DatabaseDialect.GOOGLE_STANDARD_SQL @pytest.fixture(scope="module") @@ -111,7 +111,7 @@ def user_managed_instance_config_name(spanner_client): name = f"custom-python-samples-config-{uuid.uuid4().hex[:10]}" yield name snippets.delete_instance_config( - "{}/instanceConfigs/{}".format(spanner_client.project_name, name) + "{}/instanceConfigs/{}".format(spanner_client.project_name, name) ) return @@ -140,8 +140,8 @@ def test_create_database_explicit(sample_instance, create_database_id): def test_create_instance_with_processing_units(capsys, lci_instance_id): processing_units = 500 retry_429(snippets.create_instance_with_processing_units)( - lci_instance_id, - processing_units, + lci_instance_id, + processing_units, ) out, _ = capsys.readouterr() assert lci_instance_id in out @@ -152,10 +152,10 @@ def test_create_instance_with_processing_units(capsys, lci_instance_id): def test_create_database_with_encryption_config( - capsys, instance_id, cmek_database_id, kms_key_name + capsys, instance_id, cmek_database_id, kms_key_name ): snippets.create_database_with_encryption_key( - instance_id, cmek_database_id, kms_key_name + instance_id, cmek_database_id, kms_key_name ) out, _ = capsys.readouterr() assert cmek_database_id in out @@ -177,10 +177,10 @@ def 
test_list_instance_config(capsys): @pytest.mark.dependency(name="create_instance_config") def test_create_instance_config( - capsys, user_managed_instance_config_name, base_instance_config_id + capsys, user_managed_instance_config_name, base_instance_config_id ): snippets.create_instance_config( - user_managed_instance_config_name, base_instance_config_id + user_managed_instance_config_name, base_instance_config_id ) out, _ = capsys.readouterr() assert "Created instance configuration" in out @@ -197,9 +197,9 @@ def test_update_instance_config(capsys, user_managed_instance_config_name): def test_delete_instance_config(capsys, user_managed_instance_config_name): spanner_client = spanner.Client() snippets.delete_instance_config( - "{}/instanceConfigs/{}".format( - spanner_client.project_name, user_managed_instance_config_name - ) + "{}/instanceConfigs/{}".format( + spanner_client.project_name, user_managed_instance_config_name + ) ) out, _ = capsys.readouterr() assert "successfully deleted" in out @@ -218,15 +218,15 @@ def test_list_databases(capsys, instance_id): def test_create_database_with_default_leader( - capsys, - multi_region_instance, - multi_region_instance_id, - default_leader_database_id, - default_leader, + capsys, + multi_region_instance, + multi_region_instance_id, + default_leader_database_id, + default_leader, ): retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15) retry_429(snippets.create_database_with_default_leader)( - multi_region_instance_id, default_leader_database_id, default_leader + multi_region_instance_id, default_leader_database_id, default_leader ) out, _ = capsys.readouterr() assert default_leader_database_id in out @@ -234,15 +234,15 @@ def test_create_database_with_default_leader( def test_update_database_with_default_leader( - capsys, - multi_region_instance, - multi_region_instance_id, - default_leader_database_id, - default_leader, + capsys, + multi_region_instance, + multi_region_instance_id, + 
default_leader_database_id, + default_leader, ): retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15) retry_429(snippets.update_database_with_default_leader)( - multi_region_instance_id, default_leader_database_id, default_leader + multi_region_instance_id, default_leader_database_id, default_leader ) out, _ = capsys.readouterr() assert default_leader_database_id in out @@ -256,14 +256,14 @@ def test_get_database_ddl(capsys, instance_id, sample_database): def test_query_information_schema_database_options( - capsys, - multi_region_instance, - multi_region_instance_id, - default_leader_database_id, - default_leader, + capsys, + multi_region_instance, + multi_region_instance_id, + default_leader_database_id, + default_leader, ): snippets.query_information_schema_database_options( - multi_region_instance_id, default_leader_database_id + multi_region_instance_id, default_leader_database_id ) out, _ = capsys.readouterr() assert default_leader in out @@ -576,8 +576,8 @@ def test_create_table_with_datatypes(capsys, instance_id, sample_database): @pytest.mark.dependency( - name="insert_datatypes_data", - depends=["create_table_with_datatypes"], + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], ) def test_insert_datatypes_data(capsys, instance_id, sample_database): snippets.insert_datatypes_data(instance_id, sample_database.database_id) @@ -639,8 +639,8 @@ def test_query_data_with_string(capsys, instance_id, sample_database): @pytest.mark.dependency( - name="add_numeric_column", - depends=["create_table_with_datatypes"], + name="add_numeric_column", + depends=["create_table_with_datatypes"], ) def test_add_numeric_column(capsys, instance_id, sample_database): snippets.add_numeric_column(instance_id, sample_database.database_id) @@ -657,7 +657,7 @@ def test_update_data_with_numeric(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, - 
sample_database): + sample_database): snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) out, _ = capsys.readouterr() @@ -665,8 +665,8 @@ def test_query_data_with_numeric_parameter(capsys, instance_id, @pytest.mark.dependency( - name="add_json_column", - depends=["create_table_with_datatypes"], + name="add_json_column", + depends=["create_table_with_datatypes"], ) def test_add_json_column(capsys, instance_id, sample_database): snippets.add_json_column(instance_id, sample_database.database_id) @@ -691,9 +691,9 @@ def test_query_data_with_json_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_timestamp_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_timestamp_parameter( - instance_id, sample_database.database_id + instance_id, sample_database.database_id ) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out From 12a39ac5d488df968dacedb4080fd7b95ec06287 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Sun, 9 Oct 2022 23:05:43 +0000 Subject: [PATCH 12/25] refactor --- samples/samples/pg_snippets.py | 6 ++---- samples/samples/pg_snippets_test.py | 2 +- samples/samples/snippets_test.py | 4 ++-- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index a93a1b12ba..6a1ea4c611 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1217,10 +1217,8 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # [END spanner_postgresql_query_with_timestamp_parameter] # Avoid time drift on the local machine. # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. 
- example_timestamp = ( - datetime.datetime.utcnow() + datetime.timedelta( - days=1) - ).isoformat() + "Z" + example_timestamp = (datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + "Z" # [START spanner_postgresql_query_with_timestamp_parameter] param = {"p1": example_timestamp} param_type = {"p1": param_types.TIMESTAMP} diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index c995fdfa63..bcb0314536 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -405,7 +405,7 @@ def test_query_data_with_string(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) out, _ = capsys.readouterr() diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 93b9ccdddc..bc64f6fc6e 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -657,7 +657,7 @@ def test_update_data_with_numeric(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) out, _ = capsys.readouterr() @@ -691,7 +691,7 @@ def test_query_data_with_json_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_timestamp_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_timestamp_parameter( instance_id, sample_database.database_id ) From 3d53ddc27a25dc0ea8eeb8cb5ed9117ac1dbeb5e Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Sun, 9 Oct 2022 23:24:49 +0000 Subject: [PATCH 13/25] refactor --- 
samples/samples/conftest.py | 6 +++--- samples/samples/pg_snippets_test.py | 4 ++-- samples/samples/snippets_test.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index d3c4c58b8e..113a5479bd 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -42,14 +42,14 @@ def sample_name(): @pytest.fixture(scope="module") -def spanner_dialect(): - """Spanner dialect to be used for this sample. +def database_dialect(): + """Database dialect to be used for this sample. The dialect is used to initialize the dialect for the database. It can either be GoogleStandardSql or PostgreSql. """ raise NotImplementedError( - "Define 'spanner_dialect' fixture in sample test driver") + "Define 'database_dialect' fixture in sample test driver") @pytest.fixture(scope="session") diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index bcb0314536..3709b97b68 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -50,7 +50,7 @@ def sample_name(): @pytest.fixture(scope="module") -def spanner_dialect(): +def database_dialect(): """Spanner dialect to be used for this sample. The dialect is used to initialize the dialect for the database. @@ -414,7 +414,7 @@ def test_query_data_with_numeric_parameter(capsys, instance_id, @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_timestamp_parameter(capsys, instance_id, - sample_database): + sample_database): snippets.query_data_with_timestamp_parameter( instance_id, sample_database.database_id ) diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index bc64f6fc6e..d31c8268ea 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -50,7 +50,7 @@ def sample_name(): @pytest.fixture(scope="module") -def spanner_dialect(): +def database_dialect(): """Spanner dialect to be used for this sample. 
The dialect is used to initialize the dialect for the database. From 04f50281298c0da6f90eaedf8f99c362f1089540 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 04:04:59 +0000 Subject: [PATCH 14/25] add database dialect --- samples/samples/autocommit_test.py | 10 ++++++++++ samples/samples/backup_sample_test.py | 10 ++++++++++ samples/samples/conftest.py | 8 ++++++-- samples/samples/quickstart_test.py | 10 ++++++++++ 4 files changed, 36 insertions(+), 2 deletions(-) diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py index 8150058f1c..bc4df62858 100644 --- a/samples/samples/autocommit_test.py +++ b/samples/samples/autocommit_test.py @@ -16,6 +16,16 @@ def sample_name(): return "autocommit" +@pytest.fixture(scope="module") +def database_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. + """ + return DatabaseDialect.GOOGLE_STANDARD_SQL + + @RetryErrors(exception=Aborted, max_tries=2) def test_enable_autocommit_mode(capsys, instance_id, sample_database): # Delete table if it exists for retry attempts. diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 5f094e7a77..44dca55c4b 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -35,6 +35,16 @@ def unique_backup_id(): return f"test-backup-{uuid.uuid4().hex[:10]}" +@pytest.fixture(scope="module") +def database_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. 
+ """ + return DatabaseDialect.GOOGLE_STANDARD_SQL + + RESTORE_DB_ID = unique_database_id() BACKUP_ID = unique_backup_id() CMEK_RESTORE_DB_ID = unique_database_id() diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index 113a5479bd..8ce94c4e4a 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -17,6 +17,8 @@ import uuid from google.api_core import exceptions + +from google.cloud import spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import backup from google.cloud.spanner_v1 import client @@ -199,6 +201,7 @@ def database_ddl(): @pytest.fixture(scope="module") def sample_database( + spanner_client, sample_instance, database_id, database_ddl, @@ -212,15 +215,16 @@ def sample_database( if not sample_database.exists(): sample_database.create() - request = sample_instance.spanner_admin_database_v1.UpdateDatabaseDdlRequest( + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( database=sample_database.name, statements=database_ddl, ) - sample_instance.database_admin_api.update_database_ddl(request) + spanner_client.database_admin_api.update_database_ddl(request) yield sample_database sample_database.drop() + return sample_database = sample_instance.database( database_id, diff --git a/samples/samples/quickstart_test.py b/samples/samples/quickstart_test.py index 3726e7aef6..7f8d9920c7 100644 --- a/samples/samples/quickstart_test.py +++ b/samples/samples/quickstart_test.py @@ -22,6 +22,16 @@ def sample_name(): return "quickstart" +@pytest.fixture(scope="module") +def database_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. 
+ """ + return DatabaseDialect.GOOGLE_STANDARD_SQL + + def test_quickstart(capsys, instance_id, sample_database): quickstart.run_quickstart(instance_id, sample_database.database_id) out, _ = capsys.readouterr() From 4073cb990b0e9001500f4ce0b6142cfe0de782b9 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 04:13:07 +0000 Subject: [PATCH 15/25] database dialect fixture change --- samples/samples/autocommit_test.py | 10 ---------- samples/samples/backup_sample_test.py | 10 ---------- samples/samples/conftest.py | 5 +++-- samples/samples/quickstart_test.py | 10 ---------- 4 files changed, 3 insertions(+), 32 deletions(-) diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py index bc4df62858..8150058f1c 100644 --- a/samples/samples/autocommit_test.py +++ b/samples/samples/autocommit_test.py @@ -16,16 +16,6 @@ def sample_name(): return "autocommit" -@pytest.fixture(scope="module") -def database_dialect(): - """Spanner dialect to be used for this sample. - - The dialect is used to initialize the dialect for the database. - It can either be GoogleStandardSql or PostgreSql. - """ - return DatabaseDialect.GOOGLE_STANDARD_SQL - - @RetryErrors(exception=Aborted, max_tries=2) def test_enable_autocommit_mode(capsys, instance_id, sample_database): # Delete table if it exists for retry attempts. diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 44dca55c4b..5f094e7a77 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -35,16 +35,6 @@ def unique_backup_id(): return f"test-backup-{uuid.uuid4().hex[:10]}" -@pytest.fixture(scope="module") -def database_dialect(): - """Spanner dialect to be used for this sample. - - The dialect is used to initialize the dialect for the database. - It can either be GoogleStandardSql or PostgreSql. 
- """ - return DatabaseDialect.GOOGLE_STANDARD_SQL - - RESTORE_DB_ID = unique_database_id() BACKUP_ID = unique_backup_id() CMEK_RESTORE_DB_ID = unique_database_id() diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index 8ce94c4e4a..ed25248594 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -50,8 +50,9 @@ def database_dialect(): The dialect is used to initialize the dialect for the database. It can either be GoogleStandardSql or PostgreSql. """ - raise NotImplementedError( - "Define 'database_dialect' fixture in sample test driver") + # By default, we consider GOOGLE_STANDARD_SQL dialect. Other specific tests + # can override this if required. + return DatabaseDialect.GOOGLE_STANDARD_SQL @pytest.fixture(scope="session") diff --git a/samples/samples/quickstart_test.py b/samples/samples/quickstart_test.py index 7f8d9920c7..3726e7aef6 100644 --- a/samples/samples/quickstart_test.py +++ b/samples/samples/quickstart_test.py @@ -22,16 +22,6 @@ def sample_name(): return "quickstart" -@pytest.fixture(scope="module") -def database_dialect(): - """Spanner dialect to be used for this sample. - - The dialect is used to initialize the dialect for the database. - It can either be GoogleStandardSql or PostgreSql. 
- """ - return DatabaseDialect.GOOGLE_STANDARD_SQL - - def test_quickstart(capsys, instance_id, sample_database): quickstart.run_quickstart(instance_id, sample_database.database_id) out, _ = capsys.readouterr() From e590809348db6c16ca895f46acb7464094951b1c Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 05:14:12 +0000 Subject: [PATCH 16/25] fix ddl --- samples/samples/pg_snippets_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 3709b97b68..7508848ccb 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -36,7 +36,7 @@ CREATE TABLE Albums ( SingerId BIGINT NOT NULL, AlbumId BIGINT NOT NULL, - AlbumTitle CHARACTER VARYING(MAX), + AlbumTitle CHARACTER VARYING(1024), PRIMARY KEY (SingerId, AlbumId) ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE """ From c3305aff2a4b3d024c2fb136cea6146a2350bc9e Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 08:05:40 +0000 Subject: [PATCH 17/25] yield operation as well --- samples/samples/conftest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index ed25248594..addb483cc3 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -221,8 +221,10 @@ def sample_database( statements=database_ddl, ) - spanner_client.database_admin_api.update_database_ddl(request) - yield sample_database + operation =\ + spanner_client.database_admin_api.update_database_ddl(request) + + yield operation, sample_database sample_database.drop() return From abf54a93276f9bf6efc604e7400aab522f4324e3 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 08:42:01 +0000 Subject: [PATCH 18/25] skip backup tests --- samples/samples/backup_sample_test.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/samples/samples/backup_sample_test.py 
b/samples/samples/backup_sample_test.py index 5f094e7a77..538e36882e 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -19,6 +19,11 @@ import backup_sample +if pytest.__version__ < "3.0.0": + pytest.skip() +else: + pytestmark = pytest.mark.skip + @pytest.fixture(scope="module") def sample_name(): From 24387e0c9297f26b37e39c6b3f95545c0bc82683 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Mon, 10 Oct 2022 08:49:09 +0000 Subject: [PATCH 19/25] config changes --- samples/samples/conftest.py | 11 ++++++++--- samples/samples/noxfile.py | 5 +---- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index addb483cc3..0e8f283440 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -29,6 +29,8 @@ INSTANCE_CREATION_TIMEOUT = 560 # seconds +OPERATION_TIMEOUT_SECONDS = 120 # seconds + retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15) @@ -214,7 +216,8 @@ def sample_database( ) if not sample_database.exists(): - sample_database.create() + operation = sample_database.create() + operation.result(OPERATION_TIMEOUT_SECONDS) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( database=sample_database.name, @@ -223,8 +226,9 @@ def sample_database( operation =\ spanner_client.database_admin_api.update_database_ddl(request) + operation.result(OPERATION_TIMEOUT_SECONDS) - yield operation, sample_database + yield sample_database sample_database.drop() return @@ -235,7 +239,8 @@ def sample_database( ) if not sample_database.exists(): - sample_database.create() + operation = sample_database.create() + operation.result(OPERATION_TIMEOUT_SECONDS) yield sample_database diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py index b053ca568f..16e0ce5f96 100644 --- a/samples/samples/noxfile.py +++ b/samples/samples/noxfile.py @@ -208,10 +208,7 @@ def _session_tests( session: nox.sessions.Session, post_install: 
Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) + test_list = glob.glob("**/pg_snippets_test.py", recursive=True) if len(test_list) == 0: print("No tests found, skipping directory.") From 78bb2196c26ff6d60cf006b0e4a3418b387d20c6 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 02:22:49 +0000 Subject: [PATCH 20/25] fix --- samples/samples/conftest.py | 2 +- samples/samples/pg_snippets_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py index 0e8f283440..c63548c460 100644 --- a/samples/samples/conftest.py +++ b/samples/samples/conftest.py @@ -29,7 +29,7 @@ INSTANCE_CREATION_TIMEOUT = 560 # seconds -OPERATION_TIMEOUT_SECONDS = 120 # seconds +OPERATION_TIMEOUT_SECONDS = 120 # seconds retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 7508848ccb..1f7a800e6d 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -338,7 +338,7 @@ def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) out, _ = capsys.readouterr() - assert "6 record(s) deleted" in out + assert "5 record(s) deleted" in out @pytest.mark.dependency(depends=["add_column"]) From 3c80109d9a974edec0925601d42787d0d0629fa9 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 05:09:18 +0000 Subject: [PATCH 21/25] minor lint fix --- samples/samples/backup_sample_test.py | 5 ----- samples/samples/noxfile.py | 5 ++++- samples/samples/pg_snippets.py | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/samples/samples/backup_sample_test.py 
b/samples/samples/backup_sample_test.py index 538e36882e..5f094e7a77 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -19,11 +19,6 @@ import backup_sample -if pytest.__version__ < "3.0.0": - pytest.skip() -else: - pytestmark = pytest.mark.skip - @pytest.fixture(scope="module") def sample_name(): diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py index 16e0ce5f96..b053ca568f 100644 --- a/samples/samples/noxfile.py +++ b/samples/samples/noxfile.py @@ -208,7 +208,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/pg_snippets_test.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 6a1ea4c611..b3c7e00a79 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -696,7 +696,7 @@ def add_timestamp_column(instance_id, database_id): operation = database.update_ddl( [ - "ALTER TABLE Albums ADD COLUMN LastUpdateTime " "SPANNER.COMMIT_TIMESTAMP"] + "ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP"] ) print("Waiting for operation to complete...") From 289f305217c92fbe21540972a03bab59bd2f2665 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 07:42:16 +0000 Subject: [PATCH 22/25] some tests were getting skipped. fixing it. 
--- samples/samples/pg_snippets.py | 71 ++++++++++++++++++++++++++++- samples/samples/pg_snippets_test.py | 19 +++++++- samples/samples/snippets_test.py | 2 +- 3 files changed, 89 insertions(+), 3 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index b3c7e00a79..8b695e2cc6 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1236,6 +1236,63 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # [END spanner_postgresql_query_with_timestamp_parameter] +# [START spanner_postgresql_add_numeric_column] +def add_numeric_column(instance_id, database_id): + """Adds a new NUMERIC column to the Venues table in the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + operation = database.update_ddl(["ALTER TABLE Venues ADD COLUMN Revenue NUMERIC"]) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + 'Altered table "Venues" on database {} on instance {}.'.format( + database_id, instance_id + ) + ) + + +# [END spanner_postgresql_add_numeric_column] + + +# [START spanner_postgresql_update_data_with_numeric_column] +def update_data_with_numeric(instance_id, database_id): + """Updates Venues tables in the database with the NUMERIC + column. + + This updates the `Revenue` column which must be created before + running this sample. 
You can add the column by running the + `add_numeric_column` sample or by running this DDL statement + against your database: + + ALTER TABLE Venues ADD COLUMN Revenue NUMERIC + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + with database.batch() as batch: + batch.update( + table="Venues", + columns=("VenueId", "Revenue"), + values=[ + (4, decimal.Decimal("35000")), + (19, decimal.Decimal("104500")), + (42, decimal.Decimal("99999999999999999999999999999.99")), + ], + ) + + print("Updated data.") + + +# [END spanner_postgresql_update_data_with_numeric_column] + + def query_data_with_numeric_parameter(instance_id, database_id): """Queries sample data using SQL with a NUMERIC parameter.""" # [START spanner_postgresql_query_with_numeric_parameter] @@ -1401,6 +1458,14 @@ def query_data_with_query_options(instance_id, database_id): "query_data_with_timestamp_parameter", help=query_data_with_timestamp_parameter.__doc__, ) + subparsers.add_parser( + "add_numeric_column", + help=add_numeric_column.__doc__, + ) + subparsers.add_parser( + "update_data_with_numeric", + help=update_data_with_numeric.__doc__, + ) subparsers.add_parser( "query_data_with_numeric_parameter", help=query_data_with_numeric_parameter.__doc__, @@ -1494,7 +1559,11 @@ def query_data_with_query_options(instance_id, database_id): query_data_with_string(args.instance_id, args.database_id) elif args.command == "query_data_with_timestamp_parameter": query_data_with_timestamp_parameter(args.instance_id, args.database_id) - elif args.command == "query_data_with_timestamp_parameter": + elif args.command == "add_numeric_column": + add_numeric_column(args.instance_id, args.database_id) + elif args.command == "update_data_with_numeric": + update_data_with_numeric(args.instance_id, args.database_id) + elif args.command == "query_data_with_numeric_parameter": query_data_with_numeric_parameter(args.instance_id, 
args.database_id) elif args.command == "query_data_with_query_options": query_data_with_query_options(args.instance_id, args.database_id) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 1f7a800e6d..155870529d 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -265,7 +265,7 @@ def test_create_table_with_timestamp(capsys, instance_id, sample_database): assert "Created Performances table on database" in out -@pytest.mark.dependency(depends=["create_table_with_datatypes"]) +@pytest.mark.dependency(depends=["create_table_with_timestamp"]) def test_insert_data_with_timestamp(capsys, instance_id, sample_database): snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) @@ -403,6 +403,23 @@ def test_query_data_with_string(capsys, instance_id, sample_database): assert "VenueId: 42, VenueName: Venue 42" in out +@pytest.mark.dependency( + name="add_numeric_column", + depends=["create_table_with_datatypes"], +) +def test_add_numeric_column(capsys, instance_id, sample_database): + snippets.add_numeric_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert 'Altered table "Venues" on database ' in out + + +@pytest.mark.dependency(depends=["add_numeric_column", "insert_datatypes_data"]) +def test_update_data_with_numeric(capsys, instance_id, sample_database): + snippets.update_data_with_numeric(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Updated data" in out + + @pytest.mark.dependency(depends=["add_numeric_column"]) def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index d31c8268ea..d4143a2319 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -421,7 +421,7 @@ def test_create_table_with_timestamp(capsys, instance_id, sample_database): assert "Created 
Performances table on database" in out -@pytest.mark.dependency(depends=["create_table_with_datatypes"]) +@pytest.mark.dependency(depends=["create_table_with_timestamp"]) def test_insert_data_with_timestamp(capsys, instance_id, sample_database): snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) From 9fbb899ae37ab6d1df19cbb0fe8b507eeedc923a Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 11:05:48 +0000 Subject: [PATCH 23/25] fix test --- samples/samples/pg_snippets.py | 29 ----------------------------- samples/samples/pg_snippets_test.py | 14 ++------------ 2 files changed, 2 insertions(+), 41 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 8b695e2cc6..0f1f7127e2 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1236,29 +1236,6 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # [END spanner_postgresql_query_with_timestamp_parameter] -# [START spanner_postgresql_add_numeric_column] -def add_numeric_column(instance_id, database_id): - """Adds a new NUMERIC column to the Venues table in the example database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - - database = instance.database(database_id) - - operation = database.update_ddl(["ALTER TABLE Venues ADD COLUMN Revenue NUMERIC"]) - - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) - - print( - 'Altered table "Venues" on database {} on instance {}.'.format( - database_id, instance_id - ) - ) - - -# [END spanner_postgresql_add_numeric_column] - - # [START spanner_postgresql_update_data_with_numeric_column] def update_data_with_numeric(instance_id, database_id): """Updates Venues tables in the database with the NUMERIC @@ -1458,10 +1435,6 @@ def query_data_with_query_options(instance_id, database_id): "query_data_with_timestamp_parameter", 
help=query_data_with_timestamp_parameter.__doc__, ) - subparsers.add_parser( - "add_numeric_column", - help=add_numeric_column.__doc__, - ) subparsers.add_parser( "update_data_with_numeric", help=update_data_with_numeric.__doc__, @@ -1559,8 +1532,6 @@ def query_data_with_query_options(instance_id, database_id): query_data_with_string(args.instance_id, args.database_id) elif args.command == "query_data_with_timestamp_parameter": query_data_with_timestamp_parameter(args.instance_id, args.database_id) - elif args.command == "add_numeric_column": - add_numeric_column(args.instance_id, args.database_id) elif args.command == "update_data_with_numeric": update_data_with_numeric(args.instance_id, args.database_id) elif args.command == "query_data_with_numeric_parameter": diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 155870529d..2716880832 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -403,24 +403,14 @@ def test_query_data_with_string(capsys, instance_id, sample_database): assert "VenueId: 42, VenueName: Venue 42" in out -@pytest.mark.dependency( - name="add_numeric_column", - depends=["create_table_with_datatypes"], -) -def test_add_numeric_column(capsys, instance_id, sample_database): - snippets.add_numeric_column(instance_id, sample_database.database_id) - out, _ = capsys.readouterr() - assert 'Altered table "Venues" on database ' in out - - -@pytest.mark.dependency(depends=["add_numeric_column", "insert_datatypes_data"]) +@pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_update_data_with_numeric(capsys, instance_id, sample_database): snippets.update_data_with_numeric(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Updated data" in out -@pytest.mark.dependency(depends=["add_numeric_column"]) +@pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_numeric_parameter(capsys, instance_id, 
sample_database): snippets.query_data_with_numeric_parameter(instance_id, From 18367baf5c955a7c39a14994c04c49f18120704e Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 12:36:57 +0000 Subject: [PATCH 24/25] fix test and skip few tests for faster testing --- samples/samples/autocommit_test.py | 4 ++++ samples/samples/backup_sample_test.py | 5 +++++ samples/samples/pg_snippets.py | 2 +- samples/samples/snippets_test.py | 5 +++++ 4 files changed, 15 insertions(+), 1 deletion(-) diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py index 8150058f1c..abdc573bb6 100644 --- a/samples/samples/autocommit_test.py +++ b/samples/samples/autocommit_test.py @@ -10,6 +10,10 @@ import autocommit +if pytest.__version__ < "3.0.0": + pytest.skip() +else: + pytestmark = pytest.mark.skip @pytest.fixture(scope="module") def sample_name(): diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 5f094e7a77..538e36882e 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -19,6 +19,11 @@ import backup_sample +if pytest.__version__ < "3.0.0": + pytest.skip() +else: + pytestmark = pytest.mark.skip + @pytest.fixture(scope="module") def sample_name(): diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 0f1f7127e2..367690dbd8 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1281,7 +1281,7 @@ def query_data_with_numeric_parameter(instance_id, database_id): example_numeric = decimal.Decimal("300000") param = {"p1": example_numeric} - param_type = {"p1": param_types.NUMERIC} + param_type = {"p1": param_types.PG_NUMERIC} with database.snapshot() as snapshot: results = snapshot.execute_sql( diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index d4143a2319..51705464dc 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -23,6 +23,11 @@ 
import snippets +if pytest.__version__ < "3.0.0": + pytest.skip() +else: + pytestmark = pytest.mark.skip + CREATE_TABLE_SINGERS = """\ CREATE TABLE Singers ( SingerId INT64 NOT NULL, From 0c0cacbeb1644bdedbc2d15211a27835a960f0b6 Mon Sep 17 00:00:00 2001 From: Rajat Bhatta Date: Tue, 11 Oct 2022 12:49:57 +0000 Subject: [PATCH 25/25] re-enable tests --- samples/samples/autocommit_test.py | 4 ---- samples/samples/backup_sample_test.py | 5 ----- samples/samples/snippets_test.py | 5 ----- 3 files changed, 14 deletions(-) diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py index abdc573bb6..8150058f1c 100644 --- a/samples/samples/autocommit_test.py +++ b/samples/samples/autocommit_test.py @@ -10,10 +10,6 @@ import autocommit -if pytest.__version__ < "3.0.0": - pytest.skip() -else: - pytestmark = pytest.mark.skip @pytest.fixture(scope="module") def sample_name(): diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 538e36882e..5f094e7a77 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -19,11 +19,6 @@ import backup_sample -if pytest.__version__ < "3.0.0": - pytest.skip() -else: - pytestmark = pytest.mark.skip - @pytest.fixture(scope="module") def sample_name(): diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 51705464dc..d4143a2319 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -23,11 +23,6 @@ import snippets -if pytest.__version__ < "3.0.0": - pytest.skip() -else: - pytestmark = pytest.mark.skip - CREATE_TABLE_SINGERS = """\ CREATE TABLE Singers ( SingerId INT64 NOT NULL,