From 46a3ad7e48856cee62dd7e9b5d5807c84cec7c3c Mon Sep 17 00:00:00 2001
From: Peter Lamut
Date: Fri, 7 Feb 2020 09:10:44 +0000
Subject: [PATCH 1/3] chore: remove bigquery from the monorepo

---
 .kokoro/continuous/bigquery.cfg | 7 -
 .kokoro/docs/bigquery.cfg | 7 -
 .kokoro/presubmit/bigquery.cfg | 7 -
 .kokoro/release/bigquery.cfg | 7 -
 README.rst | 2 +-
 bigquery/.coveragerc | 16 -
 bigquery/.flake8 | 13 -
 bigquery/.gitignore | 1 -
 bigquery/.repo-metadata.json | 13 -
 bigquery/CHANGELOG.md | 931 ---
 bigquery/LICENSE | 201 -
 bigquery/MANIFEST.in | 5 -
 bigquery/README.rst | 104 -
 bigquery/benchmark/README.md | 8 -
 bigquery/benchmark/benchmark.py | 46 -
 bigquery/benchmark/queries.json | 10 -
 bigquery/docs/.gitignore | 1 -
 bigquery/docs/README.rst | 1 -
 bigquery/docs/_static/custom.css | 4 -
 bigquery/docs/_templates/layout.html | 49 -
 bigquery/docs/changelog.md | 1 -
 bigquery/docs/conf.py | 363 -
 bigquery/docs/dbapi.rst | 6 -
 bigquery/docs/gapic/v2/enums.rst | 8 -
 bigquery/docs/gapic/v2/types.rst | 5 -
 .../google.cloud.bigquery.magics.html | 8 -
 bigquery/docs/index.rst | 29 -
 bigquery/docs/magics.rst | 5 -
 bigquery/docs/reference.rst | 194 -
 bigquery/docs/samples | 1 -
 bigquery/docs/snippets.py | 1195 ---
 bigquery/docs/usage.html | 8 -
 bigquery/docs/usage/client.rst | 25 -
 bigquery/docs/usage/datasets.rst | 131 -
 bigquery/docs/usage/encryption.rst | 52 -
 bigquery/docs/usage/index.rst | 35 -
 bigquery/docs/usage/jobs.rst | 21 -
 bigquery/docs/usage/pandas.rst | 62 -
 bigquery/docs/usage/queries.rst | 63 -
 bigquery/docs/usage/tables.rst | 222 -
 bigquery/google/__init__.py | 24 -
 bigquery/google/cloud/__init__.py | 24 -
 bigquery/google/cloud/bigquery/__init__.py | 154 -
 bigquery/google/cloud/bigquery/_helpers.py | 686 --
 bigquery/google/cloud/bigquery/_http.py | 43 -
 .../google/cloud/bigquery/_pandas_helpers.py | 744 --
 bigquery/google/cloud/bigquery/client.py | 2928 -------
 bigquery/google/cloud/bigquery/dataset.py | 752 --
 .../google/cloud/bigquery/dbapi/__init__.py | 87 -
 .../google/cloud/bigquery/dbapi/_helpers.py | 220 -
 .../google/cloud/bigquery/dbapi/connection.py | 59 -
 .../google/cloud/bigquery/dbapi/cursor.py | 374 -
 .../google/cloud/bigquery/dbapi/exceptions.py | 58 -
 bigquery/google/cloud/bigquery/dbapi/types.py | 84 -
 .../bigquery/encryption_configuration.py | 84 -
 bigquery/google/cloud/bigquery/enums.py | 92 -
 .../google/cloud/bigquery/external_config.py | 790 --
 bigquery/google/cloud/bigquery/job.py | 3808 ---------
 bigquery/google/cloud/bigquery/magics.py | 628 --
 bigquery/google/cloud/bigquery/model.py | 435 -
 bigquery/google/cloud/bigquery/query.py | 633 --
 bigquery/google/cloud/bigquery/retry.py | 55 -
 bigquery/google/cloud/bigquery/routine.py | 518 --
 bigquery/google/cloud/bigquery/schema.py | 293 -
 bigquery/google/cloud/bigquery/table.py | 2221 -----
 bigquery/google/cloud/bigquery_v2/__init__.py | 33 -
 .../cloud/bigquery_v2/gapic/__init__.py | 0
 .../google/cloud/bigquery_v2/gapic/enums.py | 171 -
 .../cloud/bigquery_v2/proto/__init__.py | 0
 .../bigquery_v2/proto/encryption_config.proto | 33 -
 .../proto/encryption_config_pb2.py | 108 -
 .../proto/encryption_config_pb2_grpc.py | 2 -
 .../bigquery_v2/proto/location_metadata.proto | 34 -
 .../proto/location_metadata_pb2.py | 98 -
 .../proto/location_metadata_pb2_grpc.py | 2 -
 .../cloud/bigquery_v2/proto/model.proto | 640 --
 .../cloud/bigquery_v2/proto/model_pb2.py | 4087 ---------
 .../cloud/bigquery_v2/proto/model_pb2_grpc.py | 102 -
 .../bigquery_v2/proto/model_reference.proto | 39 -
 .../bigquery_v2/proto/model_reference_pb2.py | 145 -
 .../proto/model_reference_pb2_grpc.py | 2 -
 .../bigquery_v2/proto/standard_sql.proto | 110 -
 .../bigquery_v2/proto/standard_sql_pb2.py | 373 -
 .../proto/standard_sql_pb2_grpc.py | 2 -
 bigquery/google/cloud/bigquery_v2/types.py | 58 -
 bigquery/noxfile.py | 213 -
 bigquery/pylint.config.py | 25 -
 bigquery/samples/__init__.py | 0
 bigquery/samples/add_empty_column.py | 41 -
 bigquery/samples/browse_table_data.py | 55 -
 bigquery/samples/client_list_jobs.py | 50 -
 .../samples/client_load_partitioned_table.py | 50 -
 bigquery/samples/client_query.py | 39 -
 bigquery/samples/client_query_add_column.py | 52 -
 bigquery/samples/client_query_batch.py | 46 -
 .../samples/client_query_destination_table.py | 40 -
 .../client_query_destination_table_cmek.py | 49 -
 .../client_query_destination_table_legacy.py | 44 -
 bigquery/samples/client_query_dry_run.py | 40 -
 bigquery/samples/client_query_legacy_sql.py | 39 -
 bigquery/samples/client_query_relax_column.py | 55 -
 .../samples/client_query_w_array_params.py | 43 -
 .../samples/client_query_w_named_params.py | 41 -
 .../client_query_w_positional_params.py | 43 -
 .../samples/client_query_w_struct_params.py | 38 -
 .../client_query_w_timestamp_params.py | 41 -
 bigquery/samples/copy_table.py | 35 -
 bigquery/samples/copy_table_cmek.py | 47 -
 .../samples/copy_table_multiple_source.py | 35 -
 bigquery/samples/create_dataset.py | 38 -
 bigquery/samples/create_job.py | 40 -
 bigquery/samples/create_routine.py | 47 -
 bigquery/samples/create_routine_ddl.py | 41 -
 bigquery/samples/create_table.py | 37 -
 .../samples/create_table_range_partitioned.py | 45 -
 bigquery/samples/dataset_exists.py | 32 -
 bigquery/samples/delete_dataset.py | 35 -
 bigquery/samples/delete_dataset_labels.py | 36 -
 bigquery/samples/delete_model.py | 32 -
 bigquery/samples/delete_routine.py | 31 -
 bigquery/samples/delete_table.py | 32 -
 bigquery/samples/download_public_data.py | 33 -
 .../samples/download_public_data_sandbox.py | 34 -
 bigquery/samples/get_dataset.py | 56 -
 bigquery/samples/get_dataset_labels.py | 38 -
 bigquery/samples/get_model.py | 36 -
 bigquery/samples/get_routine.py | 39 -
 bigquery/samples/get_table.py | 37 -
 bigquery/samples/label_dataset.py | 33 -
 bigquery/samples/list_datasets.py | 34 -
 bigquery/samples/list_datasets_by_label.py | 34 -
 bigquery/samples/list_models.py | 39 -
 bigquery/samples/list_routines.py | 34 -
 bigquery/samples/list_tables.py | 34 -
 bigquery/samples/load_table_dataframe.py | 116 -
 bigquery/samples/load_table_file.py | 43 -
 bigquery/samples/load_table_uri_avro.py | 38 -
 bigquery/samples/load_table_uri_cmek.py | 60 -
 bigquery/samples/load_table_uri_csv.py | 46 -
 bigquery/samples/load_table_uri_json.py | 46 -
 bigquery/samples/load_table_uri_orc.py | 38 -
 bigquery/samples/load_table_uri_parquet.py | 37 -
 .../query_external_gcs_temporary_table.py | 44 -
 .../query_external_sheets_permanent_table.py | 74 -
 .../query_external_sheets_temporary_table.py | 69 -
 bigquery/samples/query_no_cache.py | 34 -
 bigquery/samples/query_pagination.py | 53 -
 bigquery/samples/query_script.py | 69 -
 bigquery/samples/query_to_arrow.py | 53 -
 bigquery/samples/table_exists.py | 32 -
 bigquery/samples/table_insert_rows.py | 34 -
 ...le_insert_rows_explicit_none_insert_ids.py | 36 -
 bigquery/samples/tests/__init__.py | 0
 bigquery/samples/tests/conftest.py | 167 -
 .../samples/tests/test_add_empty_column.py | 22 -
 .../samples/tests/test_browse_table_data.py | 27 -
 .../samples/tests/test_client_list_jobs.py | 30 -
 .../test_client_load_partitioned_table.py | 22 -
 bigquery/samples/tests/test_client_query.py | 23 -
 .../tests/test_client_query_add_column.py | 32 -
 .../samples/tests/test_client_query_batch.py | 22 -
 .../test_client_query_destination_table.py | 22 -
 ...est_client_query_destination_table_cmek.py | 24 -
 ...t_client_query_destination_table_legacy.py | 24 -
 .../tests/test_client_query_dry_run.py | 25 -
 .../tests/test_client_query_legacy_sql.py | 24 -
 .../tests/test_client_query_relax_column.py | 32 -
 .../tests/test_client_query_w_array_params.py | 22 -
 .../tests/test_client_query_w_named_params.py | 22 -
 .../test_client_query_w_positional_params.py | 22 -
 .../test_client_query_w_struct_params.py | 23 -
 .../test_client_query_w_timestamp_params.py | 22 -
 bigquery/samples/tests/test_copy_table.py | 26 -
 .../samples/tests/test_copy_table_cmek.py | 22 -
 .../tests/test_copy_table_multiple_source.py | 52 -
 bigquery/samples/tests/test_create_dataset.py | 22 -
 bigquery/samples/tests/test_create_job.py | 22 -
 bigquery/samples/tests/test_create_table.py | 21 -
 .../test_create_table_range_partitioned.py | 27 -
 bigquery/samples/tests/test_dataset_exists.py | 29 -
 .../tests/test_dataset_label_samples.py | 33 -
 bigquery/samples/tests/test_delete_dataset.py | 22 -
 bigquery/samples/tests/test_delete_table.py | 22 -
 .../tests/test_download_public_data.py | 34 -
 .../test_download_public_data_sandbox.py | 34 -
 bigquery/samples/tests/test_get_dataset.py | 22 -
 bigquery/samples/tests/test_get_table.py | 37 -
 bigquery/samples/tests/test_list_datasets.py | 21 -
 .../tests/test_list_datasets_by_label.py | 24 -
 bigquery/samples/tests/test_list_tables.py | 23 -
 .../tests/test_load_table_dataframe.py | 72 -
 .../samples/tests/test_load_table_file.py | 39 -
 .../samples/tests/test_load_table_uri_avro.py | 21 -
 .../samples/tests/test_load_table_uri_cmek.py | 22 -
 .../samples/tests/test_load_table_uri_csv.py | 22 -
 .../samples/tests/test_load_table_uri_json.py | 22 -
 .../samples/tests/test_load_table_uri_orc.py | 22 -
 .../tests/test_load_table_uri_parquet.py | 22 -
 bigquery/samples/tests/test_model_samples.py | 39 -
 ...test_query_external_gcs_temporary_table.py | 22 -
 ...t_query_external_sheets_permanent_table.py | 24 -
 ...t_query_external_sheets_temporary_table.py | 22 -
 bigquery/samples/tests/test_query_no_cache.py | 24 -
 .../samples/tests/test_query_pagination.py | 23 -
 bigquery/samples/tests/test_query_script.py | 28 -
 bigquery/samples/tests/test_query_to_arrow.py | 28 -
 .../samples/tests/test_routine_samples.py | 101 -
 bigquery/samples/tests/test_table_exists.py | 29 -
 .../samples/tests/test_table_insert_rows.py | 32 -
 ...le_insert_rows_explicit_none_insert_ids.py | 32 -
 bigquery/samples/tests/test_undelete_table.py | 26 -
 .../tests/test_update_dataset_access.py | 24 -
 ...te_dataset_default_partition_expiration.py | 31 -
 ...update_dataset_default_table_expiration.py | 29 -
 .../tests/test_update_dataset_description.py | 22 -
 ...t_update_table_require_partition_filter.py | 35 -
 bigquery/samples/undelete_table.py | 66 -
 bigquery/samples/update_dataset_access.py | 45 -
 ...te_dataset_default_partition_expiration.py | 43 -
 ...update_dataset_default_table_expiration.py | 41 -
 .../samples/update_dataset_description.py | 38 -
 bigquery/samples/update_model.py | 39 -
 bigquery/samples/update_routine.py | 46 -
 .../update_table_require_partition_filter.py | 41 -
 bigquery/setup.cfg | 2 -
 bigquery/setup.py | 121 -
 bigquery/synth.metadata | 770 --
 bigquery/synth.py | 60 -
 bigquery/tests/__init__.py | 0
 bigquery/tests/data/characters.json | 68 -
 bigquery/tests/data/characters.jsonl | 3 -
 bigquery/tests/data/colors.avro | Bin 308 -> 0 bytes
 bigquery/tests/data/people.csv | 3 -
 bigquery/tests/data/schema.json | 88 -
 bigquery/tests/scrub_datasets.py | 25 -
 bigquery/tests/system.py | 2542 ------
 bigquery/tests/unit/__init__.py | 13 -
 bigquery/tests/unit/enums/__init__.py | 13 -
 .../enums/test_standard_sql_data_types.py | 73 -
 bigquery/tests/unit/helpers.py | 24 -
 bigquery/tests/unit/model/__init__.py | 0
 bigquery/tests/unit/model/test_model.py | 320 -
 .../tests/unit/model/test_model_reference.py | 140 -
 bigquery/tests/unit/routine/__init__.py | 0
 bigquery/tests/unit/routine/test_routine.py | 335 -
 .../unit/routine/test_routine_argument.py | 100 -
 .../unit/routine/test_routine_reference.py | 138 -
 bigquery/tests/unit/test__helpers.py | 1074 ---
 bigquery/tests/unit/test__http.py | 122 -
 bigquery/tests/unit/test__pandas_helpers.py | 1326 ---
 bigquery/tests/unit/test_client.py | 7399 -----------------
 bigquery/tests/unit/test_dataset.py | 704 --
 bigquery/tests/unit/test_dbapi__helpers.py | 187 -
 bigquery/tests/unit/test_dbapi_connection.py | 78 -
 bigquery/tests/unit/test_dbapi_cursor.py | 332 -
 bigquery/tests/unit/test_dbapi_types.py | 42 -
 .../unit/test_encryption_configuration.py | 111 -
 bigquery/tests/unit/test_external_config.py | 427 -
 bigquery/tests/unit/test_job.py | 6033 --------------
 bigquery/tests/unit/test_magics.py | 1373 ---
 bigquery/tests/unit/test_query.py | 1111 ---
 bigquery/tests/unit/test_retry.py | 69 -
 bigquery/tests/unit/test_schema.py | 634 --
 .../unit/test_signature_compatibility.py | 51 -
 bigquery/tests/unit/test_table.py | 3746 ---------
 265 files changed, 1 insertion(+), 60638 deletions(-)
 delete mode 100644 .kokoro/continuous/bigquery.cfg
 delete mode 100644 .kokoro/docs/bigquery.cfg
 delete mode 100644 .kokoro/presubmit/bigquery.cfg
 delete mode 100644 .kokoro/release/bigquery.cfg
 delete mode 100644 bigquery/.coveragerc
 delete mode 100644 bigquery/.flake8
 delete mode 100644 bigquery/.gitignore
 delete mode 100644 bigquery/.repo-metadata.json
 delete mode 100644 bigquery/CHANGELOG.md
 delete mode 100644 bigquery/LICENSE
 delete mode 100644 bigquery/MANIFEST.in
 delete mode 100644 bigquery/README.rst
 delete mode 100644 bigquery/benchmark/README.md
 delete mode 100644 bigquery/benchmark/benchmark.py
 delete mode 100644 bigquery/benchmark/queries.json
 delete mode 100644 bigquery/docs/.gitignore
 delete mode 120000 bigquery/docs/README.rst
 delete mode 100644 bigquery/docs/_static/custom.css
 delete mode 100644 bigquery/docs/_templates/layout.html
 delete mode 120000 bigquery/docs/changelog.md
 delete mode 100644 bigquery/docs/conf.py
 delete mode 100644 bigquery/docs/dbapi.rst
 delete mode 100644 bigquery/docs/gapic/v2/enums.rst
 delete mode 100644 bigquery/docs/gapic/v2/types.rst
 delete mode 100644 bigquery/docs/generated/google.cloud.bigquery.magics.html
 delete mode 100644 bigquery/docs/index.rst
 delete mode 100644 bigquery/docs/magics.rst
 delete mode 100644 bigquery/docs/reference.rst
 delete mode 120000 bigquery/docs/samples
 delete mode 100644 bigquery/docs/snippets.py
 delete mode 100644 bigquery/docs/usage.html
 delete mode 100644 bigquery/docs/usage/client.rst
 delete mode 100644 bigquery/docs/usage/datasets.rst
 delete mode 100644 bigquery/docs/usage/encryption.rst
 delete mode 100644 bigquery/docs/usage/index.rst
 delete mode 100644 bigquery/docs/usage/jobs.rst
 delete mode 100644 bigquery/docs/usage/pandas.rst
 delete mode 100644 bigquery/docs/usage/queries.rst
 delete mode 100644 bigquery/docs/usage/tables.rst
 delete mode 100644 bigquery/google/__init__.py
 delete mode 100644 bigquery/google/cloud/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery/_helpers.py
 delete mode 100644 bigquery/google/cloud/bigquery/_http.py
 delete mode 100644 bigquery/google/cloud/bigquery/_pandas_helpers.py
 delete mode 100644 bigquery/google/cloud/bigquery/client.py
 delete mode 100644 bigquery/google/cloud/bigquery/dataset.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/_helpers.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/connection.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/cursor.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/exceptions.py
 delete mode 100644 bigquery/google/cloud/bigquery/dbapi/types.py
 delete mode 100644 bigquery/google/cloud/bigquery/encryption_configuration.py
 delete mode 100644 bigquery/google/cloud/bigquery/enums.py
 delete mode 100644 bigquery/google/cloud/bigquery/external_config.py
 delete mode 100644 bigquery/google/cloud/bigquery/job.py
 delete mode 100644 bigquery/google/cloud/bigquery/magics.py
 delete mode 100644 bigquery/google/cloud/bigquery/model.py
 delete mode 100644 bigquery/google/cloud/bigquery/query.py
 delete mode 100644 bigquery/google/cloud/bigquery/retry.py
 delete mode 100644 bigquery/google/cloud/bigquery/routine.py
 delete mode 100644 bigquery/google/cloud/bigquery/schema.py
 delete mode 100644 bigquery/google/cloud/bigquery/table.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/gapic/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/gapic/enums.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/__init__.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/location_metadata.proto
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2_grpc.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model.proto
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model_pb2.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model_pb2_grpc.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model_reference.proto
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py
 delete mode 100644 bigquery/google/cloud/bigquery_v2/types.py
 delete mode 100644 bigquery/noxfile.py
 delete mode 100644 bigquery/pylint.config.py
 delete mode 100644 bigquery/samples/__init__.py
 delete mode 100644 bigquery/samples/add_empty_column.py
 delete mode 100644 bigquery/samples/browse_table_data.py
 delete mode 100644 bigquery/samples/client_list_jobs.py
 delete mode 100644 bigquery/samples/client_load_partitioned_table.py
 delete mode 100644 bigquery/samples/client_query.py
 delete mode 100644 bigquery/samples/client_query_add_column.py
 delete mode 100644 bigquery/samples/client_query_batch.py
 delete mode 100644 bigquery/samples/client_query_destination_table.py
 delete mode 100644 bigquery/samples/client_query_destination_table_cmek.py
 delete mode 100644 bigquery/samples/client_query_destination_table_legacy.py
 delete mode 100644 bigquery/samples/client_query_dry_run.py
 delete mode 100644 bigquery/samples/client_query_legacy_sql.py
 delete mode 100644 bigquery/samples/client_query_relax_column.py
 delete mode 100644 bigquery/samples/client_query_w_array_params.py
 delete mode 100644 bigquery/samples/client_query_w_named_params.py
 delete mode 100644 bigquery/samples/client_query_w_positional_params.py
 delete mode 100644 bigquery/samples/client_query_w_struct_params.py
 delete mode 100644 bigquery/samples/client_query_w_timestamp_params.py
 delete mode 100644 bigquery/samples/copy_table.py
 delete mode 100644 bigquery/samples/copy_table_cmek.py
 delete mode 100644 bigquery/samples/copy_table_multiple_source.py
 delete mode 100644 bigquery/samples/create_dataset.py
 delete mode 100644 bigquery/samples/create_job.py
 delete mode 100644 bigquery/samples/create_routine.py
 delete mode 100644 bigquery/samples/create_routine_ddl.py
 delete mode 100644 bigquery/samples/create_table.py
 delete mode 100644 bigquery/samples/create_table_range_partitioned.py
 delete mode 100644 bigquery/samples/dataset_exists.py
 delete mode 100644 bigquery/samples/delete_dataset.py
 delete mode 100644 bigquery/samples/delete_dataset_labels.py
 delete mode 100644 bigquery/samples/delete_model.py
 delete mode 100644 bigquery/samples/delete_routine.py
 delete mode 100644 bigquery/samples/delete_table.py
 delete mode 100644 bigquery/samples/download_public_data.py
 delete mode 100644 bigquery/samples/download_public_data_sandbox.py
 delete mode 100644 bigquery/samples/get_dataset.py
 delete mode 100644 bigquery/samples/get_dataset_labels.py
 delete mode 100644 bigquery/samples/get_model.py
 delete mode 100644 bigquery/samples/get_routine.py
 delete mode 100644 bigquery/samples/get_table.py
 delete mode 100644 bigquery/samples/label_dataset.py
 delete mode 100644 bigquery/samples/list_datasets.py
 delete mode 100644 bigquery/samples/list_datasets_by_label.py
 delete mode 100644 bigquery/samples/list_models.py
 delete mode 100644 bigquery/samples/list_routines.py
 delete mode 100644 bigquery/samples/list_tables.py
 delete mode 100644 bigquery/samples/load_table_dataframe.py
 delete mode 100644 bigquery/samples/load_table_file.py
 delete mode 100644 bigquery/samples/load_table_uri_avro.py
 delete mode 100644 bigquery/samples/load_table_uri_cmek.py
 delete mode 100644 bigquery/samples/load_table_uri_csv.py
 delete mode 100644 bigquery/samples/load_table_uri_json.py
 delete mode 100644 bigquery/samples/load_table_uri_orc.py
 delete mode 100644 bigquery/samples/load_table_uri_parquet.py
 delete mode 100644 bigquery/samples/query_external_gcs_temporary_table.py
 delete mode 100644 bigquery/samples/query_external_sheets_permanent_table.py
 delete mode 100644 bigquery/samples/query_external_sheets_temporary_table.py
 delete mode 100644 bigquery/samples/query_no_cache.py
 delete mode 100644 bigquery/samples/query_pagination.py
 delete mode 100644 bigquery/samples/query_script.py
 delete mode 100644 bigquery/samples/query_to_arrow.py
 delete mode 100644 bigquery/samples/table_exists.py
 delete mode 100644 bigquery/samples/table_insert_rows.py
 delete mode 100644 bigquery/samples/table_insert_rows_explicit_none_insert_ids.py
 delete mode 100644 bigquery/samples/tests/__init__.py
 delete mode 100644 bigquery/samples/tests/conftest.py
 delete mode 100644 bigquery/samples/tests/test_add_empty_column.py
 delete mode 100644 bigquery/samples/tests/test_browse_table_data.py
 delete mode 100644 bigquery/samples/tests/test_client_list_jobs.py
 delete mode 100644 bigquery/samples/tests/test_client_load_partitioned_table.py
 delete mode 100644 bigquery/samples/tests/test_client_query.py
 delete mode 100644 bigquery/samples/tests/test_client_query_add_column.py
 delete mode 100644 bigquery/samples/tests/test_client_query_batch.py
 delete mode 100644 bigquery/samples/tests/test_client_query_destination_table.py
 delete mode 100644 bigquery/samples/tests/test_client_query_destination_table_cmek.py
 delete mode 100644 bigquery/samples/tests/test_client_query_destination_table_legacy.py
 delete mode 100644 bigquery/samples/tests/test_client_query_dry_run.py
 delete mode 100644 bigquery/samples/tests/test_client_query_legacy_sql.py
 delete mode 100644 bigquery/samples/tests/test_client_query_relax_column.py
 delete mode 100644 bigquery/samples/tests/test_client_query_w_array_params.py
 delete mode 100644 bigquery/samples/tests/test_client_query_w_named_params.py
 delete mode 100644 bigquery/samples/tests/test_client_query_w_positional_params.py
 delete mode 100644 bigquery/samples/tests/test_client_query_w_struct_params.py
 delete mode 100644 bigquery/samples/tests/test_client_query_w_timestamp_params.py
 delete mode 100644 bigquery/samples/tests/test_copy_table.py
 delete mode 100644 bigquery/samples/tests/test_copy_table_cmek.py
 delete mode 100644 bigquery/samples/tests/test_copy_table_multiple_source.py
 delete mode 100644 bigquery/samples/tests/test_create_dataset.py
 delete mode 100644 bigquery/samples/tests/test_create_job.py
 delete mode 100644 bigquery/samples/tests/test_create_table.py
 delete mode 100644 bigquery/samples/tests/test_create_table_range_partitioned.py
 delete mode 100644 bigquery/samples/tests/test_dataset_exists.py
 delete mode 100644 bigquery/samples/tests/test_dataset_label_samples.py
 delete mode 100644 bigquery/samples/tests/test_delete_dataset.py
 delete mode 100644 bigquery/samples/tests/test_delete_table.py
 delete mode 100644 bigquery/samples/tests/test_download_public_data.py
 delete mode 100644 bigquery/samples/tests/test_download_public_data_sandbox.py
 delete mode 100644 bigquery/samples/tests/test_get_dataset.py
 delete mode 100644 bigquery/samples/tests/test_get_table.py
 delete mode 100644 bigquery/samples/tests/test_list_datasets.py
 delete mode 100644 bigquery/samples/tests/test_list_datasets_by_label.py
 delete mode 100644 bigquery/samples/tests/test_list_tables.py
 delete mode 100644 bigquery/samples/tests/test_load_table_dataframe.py
 delete mode 100644 bigquery/samples/tests/test_load_table_file.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_avro.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_cmek.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_csv.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_json.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_orc.py
 delete mode 100644 bigquery/samples/tests/test_load_table_uri_parquet.py
 delete mode 100644 bigquery/samples/tests/test_model_samples.py
 delete mode 100644 bigquery/samples/tests/test_query_external_gcs_temporary_table.py
 delete mode 100644 bigquery/samples/tests/test_query_external_sheets_permanent_table.py
 delete mode 100644 bigquery/samples/tests/test_query_external_sheets_temporary_table.py
 delete mode 100644 bigquery/samples/tests/test_query_no_cache.py
 delete mode 100644 bigquery/samples/tests/test_query_pagination.py
 delete mode 100644 bigquery/samples/tests/test_query_script.py
 delete mode 100644 bigquery/samples/tests/test_query_to_arrow.py
 delete mode 100644 bigquery/samples/tests/test_routine_samples.py
 delete mode 100644 bigquery/samples/tests/test_table_exists.py
 delete mode 100644 bigquery/samples/tests/test_table_insert_rows.py
 delete mode 100644 bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py
 delete mode 100644 bigquery/samples/tests/test_undelete_table.py
 delete mode 100644 bigquery/samples/tests/test_update_dataset_access.py
 delete mode 100644 bigquery/samples/tests/test_update_dataset_default_partition_expiration.py
 delete mode 100644 bigquery/samples/tests/test_update_dataset_default_table_expiration.py
 delete mode 100644 bigquery/samples/tests/test_update_dataset_description.py
 delete mode 100644 bigquery/samples/tests/test_update_table_require_partition_filter.py
 delete mode 100644 bigquery/samples/undelete_table.py
 delete mode 100644 bigquery/samples/update_dataset_access.py
 delete mode 100644 bigquery/samples/update_dataset_default_partition_expiration.py
 delete mode 100644 bigquery/samples/update_dataset_default_table_expiration.py
 delete mode 100644 bigquery/samples/update_dataset_description.py
 delete mode 100644 bigquery/samples/update_model.py
 delete mode 100644 bigquery/samples/update_routine.py
 delete mode 100644 bigquery/samples/update_table_require_partition_filter.py
 delete mode 100644 bigquery/setup.cfg
 delete mode 100644 bigquery/setup.py
 delete mode 100644 bigquery/synth.metadata
 delete mode 100644 bigquery/synth.py
 delete mode 100644 bigquery/tests/__init__.py
 delete mode 100644 bigquery/tests/data/characters.json
 delete mode 100644 bigquery/tests/data/characters.jsonl
 delete mode 100644 bigquery/tests/data/colors.avro
 delete mode 100644 bigquery/tests/data/people.csv
 delete mode 100644 bigquery/tests/data/schema.json
 delete mode 100644 bigquery/tests/scrub_datasets.py
 delete mode 100644 bigquery/tests/system.py
 delete mode 100644 bigquery/tests/unit/__init__.py
 delete mode 100644 bigquery/tests/unit/enums/__init__.py
 delete mode 100644 bigquery/tests/unit/enums/test_standard_sql_data_types.py
 delete mode 100644 bigquery/tests/unit/helpers.py
 delete mode 100644 bigquery/tests/unit/model/__init__.py
 delete mode 100644 bigquery/tests/unit/model/test_model.py
 delete mode 100644 bigquery/tests/unit/model/test_model_reference.py
 delete mode 100644 bigquery/tests/unit/routine/__init__.py
 delete mode 100644 bigquery/tests/unit/routine/test_routine.py
 delete mode 100644 bigquery/tests/unit/routine/test_routine_argument.py
 delete mode 100644 bigquery/tests/unit/routine/test_routine_reference.py
 delete mode 100644 bigquery/tests/unit/test__helpers.py
 delete mode 100644 bigquery/tests/unit/test__http.py
 delete mode 100644 bigquery/tests/unit/test__pandas_helpers.py
 delete mode 100644 bigquery/tests/unit/test_client.py
 delete mode 100644 bigquery/tests/unit/test_dataset.py
 delete mode 100644 bigquery/tests/unit/test_dbapi__helpers.py
 delete mode 100644 bigquery/tests/unit/test_dbapi_connection.py
 delete mode 100644 bigquery/tests/unit/test_dbapi_cursor.py
 delete mode 100644 bigquery/tests/unit/test_dbapi_types.py
 delete mode 100644 bigquery/tests/unit/test_encryption_configuration.py
 delete mode 100644 bigquery/tests/unit/test_external_config.py
 delete mode 100644 bigquery/tests/unit/test_job.py
 delete mode 100644 bigquery/tests/unit/test_magics.py
 delete mode 100644 bigquery/tests/unit/test_query.py
 delete mode 100644 bigquery/tests/unit/test_retry.py
 delete mode 100644 bigquery/tests/unit/test_schema.py
 delete mode 100644 bigquery/tests/unit/test_signature_compatibility.py
 delete mode 100644 bigquery/tests/unit/test_table.py

diff --git a/.kokoro/continuous/bigquery.cfg b/.kokoro/continuous/bigquery.cfg
deleted file mode 100644
index 69eabb20803c..000000000000
--- a/.kokoro/continuous/bigquery.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Tell the trampoline which build file to use.
-env_vars: {
-  key: "PACKAGE"
-  value: "bigquery"
-}
diff --git a/.kokoro/docs/bigquery.cfg b/.kokoro/docs/bigquery.cfg
deleted file mode 100644
index 69eabb20803c..000000000000
--- a/.kokoro/docs/bigquery.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Tell the trampoline which build file to use.
-env_vars: {
-  key: "PACKAGE"
-  value: "bigquery"
-}
diff --git a/.kokoro/presubmit/bigquery.cfg b/.kokoro/presubmit/bigquery.cfg
deleted file mode 100644
index 69eabb20803c..000000000000
--- a/.kokoro/presubmit/bigquery.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Tell the trampoline which build file to use.
-env_vars: {
-  key: "PACKAGE"
-  value: "bigquery"
-}
diff --git a/.kokoro/release/bigquery.cfg b/.kokoro/release/bigquery.cfg
deleted file mode 100644
index 69eabb20803c..000000000000
--- a/.kokoro/release/bigquery.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Tell the trampoline which build file to use.
-env_vars: {
-  key: "PACKAGE"
-  value: "bigquery"
-}
diff --git a/README.rst b/README.rst
index 506863cdd4c3..0c25bb43786b 100644
--- a/README.rst
+++ b/README.rst
@@ -43,7 +43,7 @@ The following client libraries have **GA** support:
 - `Stackdriver Logging`_ (`Logging README`_, `Logging Documentation`_)
 
 .. _Google BigQuery: https://pypi.org/project/google-cloud-bigquery/
-.. _BigQuery README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery
+.. _BigQuery README: https://github.com/googleapis/python-bigquery#python-client-for-google-bigquery
 .. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest
 
 .. _Google Cloud Bigtable: https://pypi.org/project/google-cloud-bigtable/
diff --git a/bigquery/.coveragerc b/bigquery/.coveragerc
deleted file mode 100644
index 098720f672e1..000000000000
--- a/bigquery/.coveragerc
+++ /dev/null
@@ -1,16 +0,0 @@
-[run]
-branch = True
-
-[report]
-fail_under = 100
-show_missing = True
-exclude_lines =
-    # Re-enable the standard pragma
-    pragma: NO COVER
-    # Ignore debug-only repr
-    def __repr__
-    # Ignore abstract methods
-    raise NotImplementedError
-omit =
-    */gapic/*.py
-    */proto/*.py
diff --git a/bigquery/.flake8 b/bigquery/.flake8
deleted file mode 100644
index 61766fa84d02..000000000000
--- a/bigquery/.flake8
+++ /dev/null
@@ -1,13 +0,0 @@
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-    # Exclude generated code.
-    **/proto/**
-    **/gapic/**
-    *_pb2.py
-
-    # Standard linting exemptions.
-    __pycache__,
-    .git,
-    *.pyc,
-    conf.py
diff --git a/bigquery/.gitignore b/bigquery/.gitignore
deleted file mode 100644
index 9e3a5f25770c..000000000000
--- a/bigquery/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-docs/_build
\ No newline at end of file
diff --git a/bigquery/.repo-metadata.json b/bigquery/.repo-metadata.json
deleted file mode 100644
index 5b4734b8e389..000000000000
--- a/bigquery/.repo-metadata.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "name": "bigquery",
-  "name_pretty": "Google Cloud BigQuery",
-  "product_documentation": "https://cloud.google.com/bigquery",
-  "client_documentation": "https://googleapis.dev/python/bigquery/latest",
-  "issue_tracker": "https://issuetracker.google.com/savedsearches/559654",
-  "release_level": "ga",
-  "language": "python",
-  "repo": "googleapis/google-cloud-python",
-  "distribution_name": "google-cloud-bigquery",
-  "api_id": "bigquery.googleapis.com",
-  "requires_billing": false
-}
\ No newline at end of file
diff --git a/bigquery/CHANGELOG.md b/bigquery/CHANGELOG.md
deleted file mode 100644
index 0da745204cec..000000000000
--- a/bigquery/CHANGELOG.md
+++ /dev/null
@@ -1,931 +0,0 @@
-# Changelog
-
-[PyPI History][1]
-
-[1]: https://pypi.org/project/google-cloud-bigquery/#history
-
-## 1.24.0
-
-02-03-2020 01:38 PST
-
-### Implementation Changes
-
-- Fix inserting missing repeated fields. ([#10196](https://github.com/googleapis/google-cloud-python/pull/10196))
-- Deprecate `client.dataset()` in favor of `DatasetReference`. ([#7753](https://github.com/googleapis/google-cloud-python/pull/7753))
-- Use faster `to_arrow` + `to_pandas` in `to_dataframe()` when `pyarrow` is available. ([#10027](https://github.com/googleapis/google-cloud-python/pull/10027))
-- Write pandas `datetime[ns]` columns to BigQuery TIMESTAMP columns. ([#10028](https://github.com/googleapis/google-cloud-python/pull/10028))
-
-### New Features
-
-- Check `rows` argument type in `insert_rows()`. ([#10174](https://github.com/googleapis/google-cloud-python/pull/10174))
-- Check `json_rows` arg type in `insert_rows_json()`. ([#10162](https://github.com/googleapis/google-cloud-python/pull/10162))
-- Make `RowIterator.to_dataframe_iterable()` method public. ([#10017](https://github.com/googleapis/google-cloud-python/pull/10017))
-- Add retry parameter to public methods where missing. ([#10026](https://github.com/googleapis/google-cloud-python/pull/10026))
-- Add timeout parameter to Client and Job public methods. ([#10002](https://github.com/googleapis/google-cloud-python/pull/10002))
-- Add timeout parameter to `QueryJob.done()` method. ([#9875](https://github.com/googleapis/google-cloud-python/pull/9875))
-- Add `create_bqstorage_client` parameter to `to_dataframe()` and `to_arrow()` methods. ([#9573](https://github.com/googleapis/google-cloud-python/pull/9573))
-
-### Dependencies
-
-- Fix minimum versions of `google-cloud-core` and `google-resumable-media` dependencies. ([#10016](https://github.com/googleapis/google-cloud-python/pull/10016))
-
-### Documentation
-
-- Fix a comment typo in `job.py`. ([#10209](https://github.com/googleapis/google-cloud-python/pull/10209))
-- Update code samples of load table file and load table URI. ([#10175](https://github.com/googleapis/google-cloud-python/pull/10175))
-- Uncomment `Client` constructor and imports in samples. ([#10058](https://github.com/googleapis/google-cloud-python/pull/10058))
-- Remove unused query code sample. ([#10024](https://github.com/googleapis/google-cloud-python/pull/10024))
-- Update code samples to use strings for table and dataset IDs. ([#9974](https://github.com/googleapis/google-cloud-python/pull/9974))
-
-### Internal / Testing Changes
-
-- Bump copyright year to 2020, tweak docstring formatting (via synth). [#10225](https://github.com/googleapis/google-cloud-python/pull/10225)
-- Add tests for concatenating categorical columns. ([#10180](https://github.com/googleapis/google-cloud-python/pull/10180))
-- Adjust test assertions to the new default timeout. ([#10222](https://github.com/googleapis/google-cloud-python/pull/10222))
-- Use Python 3.6 for the nox blacken session (via synth). ([#10012](https://github.com/googleapis/google-cloud-python/pull/10012))
-
-## 1.23.1
-
-12-16-2019 09:39 PST
-
-
-### Implementation Changes
-
-- Add `iamMember` entity type to allowed access classes. ([#9973](https://github.com/googleapis/google-cloud-python/pull/9973))
-- Fix typo in import error message (pandas -> pyarrow). ([#9955](https://github.com/googleapis/google-cloud-python/pull/9955))
-
-### Dependencies
-
-- Add `six` as an explicit dependency. ([#9979](https://github.com/googleapis/google-cloud-python/pull/9979))
-
-### Documentation
-
-- Add sample to read from query destination table. ([#9964](https://github.com/googleapis/google-cloud-python/pull/9964))
-
-## 1.23.0
-
-12-11-2019 13:31 PST
-
-### New Features
-
-- Add `close()` method to client for releasing open sockets. ([#9894](https://github.com/googleapis/google-cloud-python/pull/9894))
-- Add support of `use_avro_logical_types` for extract jobs. ([#9642](https://github.com/googleapis/google-cloud-python/pull/9642))
-- Add support for hive partitioning options configuration. ([#9626](https://github.com/googleapis/google-cloud-python/pull/9626))
-- Add description for routine entities. ([#9785](https://github.com/googleapis/google-cloud-python/pull/9785))
-
-### Documentation
-
-- Update code samples to use strings for table and dataset IDs. ([#9495](https://github.com/googleapis/google-cloud-python/pull/9495))
-
-### Internal / Testing Changes
-
-- Run unit tests with Python 3.8. ([#9880](https://github.com/googleapis/google-cloud-python/pull/9880))
-- Import `Mapping` from `collections.abc` not from `collections`. ([#9826](https://github.com/googleapis/google-cloud-python/pull/9826))
-
-## 1.22.0
-
-11-13-2019 12:23 PST
-
-
-### Implementation Changes
-- Preserve job config passed to Client methods. ([#9735](https://github.com/googleapis/google-cloud-python/pull/9735))
-- Use pyarrow fallback for improved schema detection. ([#9321](https://github.com/googleapis/google-cloud-python/pull/9321))
-- Add TypeError if wrong `job_config` type is passed to client job methods. ([#9506](https://github.com/googleapis/google-cloud-python/pull/9506))
-- Fix arrow deprecation warning. ([#9504](https://github.com/googleapis/google-cloud-python/pull/9504))
-
-### New Features
-- Add `--destination_table` parameter to IPython magic. ([#9599](https://github.com/googleapis/google-cloud-python/pull/9599))
-- Allow passing schema as a sequence of dicts. ([#9550](https://github.com/googleapis/google-cloud-python/pull/9550))
-- Implement defaultEncryptionConfiguration on datasets. ([#9489](https://github.com/googleapis/google-cloud-python/pull/9489))
-- Add range partitioning to tables, load jobs, and query jobs. ([#9477](https://github.com/googleapis/google-cloud-python/pull/9477))
-
-### Dependencies
-- Pin `google-resumable-media` to include 0.5.x. ([#9572](https://github.com/googleapis/google-cloud-python/pull/9572))
-
-### Documentation
-- Fix link anchors in external config docstrings. ([#9627](https://github.com/googleapis/google-cloud-python/pull/9627))
-- Add python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036))
-- Add table create sample using integer range partitioning. ([#9478](https://github.com/googleapis/google-cloud-python/pull/9478))
-- Document how to achieve higher write limit and add tests. ([#9574](https://github.com/googleapis/google-cloud-python/pull/9574))
-- Add code sample for scripting. ([#9537](https://github.com/googleapis/google-cloud-python/pull/9537))
-- Rewrite docs in Google style, part 2. ([#9481](https://github.com/googleapis/google-cloud-python/pull/9481))
-- Use multi-regional key path for CMEK in snippets. ([#9523](https://github.com/googleapis/google-cloud-python/pull/9523))
-
-### Internal / Testing Changes
-- Fix undelete table system test to use milliseconds in snapshot decorator. ([#9649](https://github.com/googleapis/google-cloud-python/pull/9649))
-- Format code with latest version of black. ([#9556](https://github.com/googleapis/google-cloud-python/pull/9556))
-- Remove duplicate test dependencies. ([#9503](https://github.com/googleapis/google-cloud-python/pull/9503))
-
-## 1.21.0
-
-10-16-2019 10:33 PDT
-
-
-### New Features
-
-- add ability to pass in a table ID instead of a query to the `%%bigquery` magic ([#9170](https://github.com/googleapis/google-cloud-python/pull/9170))
-- add support for custom `QueryJobConfig` in `BigQuery.cursor.execute` method ([#9278](https://github.com/googleapis/google-cloud-python/pull/9278))
-- store `QueryJob` to destination var on error in `%%bigquery` magic ([#9245](https://github.com/googleapis/google-cloud-python/pull/9245))
-- add script statistics to job resource ([#9428](https://github.com/googleapis/google-cloud-python/pull/9428))
-- add support for sheets ranges ([#9416](https://github.com/googleapis/google-cloud-python/pull/9416))
-- add support for listing jobs by parent job ([#9225](https://github.com/googleapis/google-cloud-python/pull/9225))
-- expose customer managed encryption key for ML models ([#9302](https://github.com/googleapis/google-cloud-python/pull/9302))
-- add `Dataset.default_partition_expiration_ms` and `Table.require_partition_filter` properties ([#9464](https://github.com/googleapis/google-cloud-python/pull/9464))
-
-### Dependencies
-
-- restrict version range of `google-resumable-media` ([#9243](https://github.com/googleapis/google-cloud-python/pull/9243))
-
-### Documentation
-
-- document how to load data as JSON string ([#9231](https://github.com/googleapis/google-cloud-python/pull/9231))
-- standardize comments and formatting in existing code samples ([#9212](https://github.com/googleapis/google-cloud-python/pull/9212))
-- rewrite docstrings in Google style ([#9326](https://github.com/googleapis/google-cloud-python/pull/9326))
-- fix incorrect links to REST API in reference docs ([#9436](https://github.com/googleapis/google-cloud-python/pull/9436))
-
-### Internal / Testing Changes
-
-- add code samples to lint check ([#9277](https://github.com/googleapis/google-cloud-python/pull/9277))
-- update code samples to use strings for table and dataset IDs ([#9136](https://github.com/googleapis/google-cloud-python/pull/9136))
-- simplify scripting system test to reduce flakiness ([#9458](https://github.com/googleapis/google-cloud-python/pull/9458))
-
-## 1.20.0
-
-09-13-2019 11:22 PDT
-
-
-### Implementation Changes
-- Change default endpoint to bigquery.googleapis.com ([#9213](https://github.com/googleapis/google-cloud-python/pull/9213))
-- Change the default value of Cursor instances' `arraysize` attribute to None ([#9199](https://github.com/googleapis/google-cloud-python/pull/9199))
-- Deprecate automatic schema conversion. ([#9176](https://github.com/googleapis/google-cloud-python/pull/9176))
-- Fix `list_rows()` max results with BQ storage client ([#9178](https://github.com/googleapis/google-cloud-python/pull/9178))
-
-### New Features
-- Add `Model.encryption_config`. (via synth) ([#9214](https://github.com/googleapis/google-cloud-python/pull/9214))
-- Add `Client.insert_rows_from_dataframe()` method ([#9162](https://github.com/googleapis/google-cloud-python/pull/9162))
-- Add support for array parameters to `Cursor.execute()`. ([#9189](https://github.com/googleapis/google-cloud-python/pull/9189))
-- Add support for project IDs with org prefix to `Table.from_string()` factory. ([#9161](https://github.com/googleapis/google-cloud-python/pull/9161))
-- Add `--max_results` option to Jupyter magics ([#9169](https://github.com/googleapis/google-cloud-python/pull/9169))
-- Autofetch table schema on load if not provided. ([#9108](https://github.com/googleapis/google-cloud-python/pull/9108))
-- Add `max_results` parameter to `QueryJob.result()`. ([#9167](https://github.com/googleapis/google-cloud-python/pull/9167))
-
-### Documentation
-- Fix doc link. ([#9200](https://github.com/googleapis/google-cloud-python/pull/9200))
-
-### Internal / Testing Changes
-- Revert "Disable failing snippets test ([#9156](https://github.com/googleapis/google-cloud-python/pull/9156))." ([#9220](https://github.com/googleapis/google-cloud-python/pull/9220))
-
-## 1.19.0
-
-09-03-2019 14:33 PDT
-
-### Implementation Changes
-
-- Raise when unexpected fields are present in the `LoadJobConfig.schema` when calling `load_table_from_dataframe`. ([#9096](https://github.com/googleapis/google-cloud-python/pull/9096))
-- Determine the schema in `load_table_from_dataframe` based on dtypes. ([#9049](https://github.com/googleapis/google-cloud-python/pull/9049))
-- Raise helpful error when loading table from dataframe with `STRUCT` columns. ([#9053](https://github.com/googleapis/google-cloud-python/pull/9053))
-- Fix schema recognition of struct field types. ([#9001](https://github.com/googleapis/google-cloud-python/pull/9001))
-- Fix deserializing `None` in `QueryJob` for queries with parameters. ([#9029](https://github.com/googleapis/google-cloud-python/pull/9029))
-
-### New Features
-
-- Include indexes in table written by `load_table_from_dataframe`, only if
-  fields corresponding to indexes are present in `LoadJobConfig.schema`.
-  ([#9084](https://github.com/googleapis/google-cloud-python/pull/9084))
-- Add `client_options` to constructor. ([#8999](https://github.com/googleapis/google-cloud-python/pull/8999))
-- Add `--dry_run` option to `%%bigquery` magic. ([#9067](https://github.com/googleapis/google-cloud-python/pull/9067))
-- Add `load_table_from_json()` method to create a table from a list of dictionaries. ([#9076](https://github.com/googleapis/google-cloud-python/pull/9076))
-- Allow subset of schema to be passed into `load_table_from_dataframe`. ([#9064](https://github.com/googleapis/google-cloud-python/pull/9064))
-- Add support for unsetting `LoadJobConfig.schema`. ([#9077](https://github.com/googleapis/google-cloud-python/pull/9077))
-- Add support to `Dataset` for project IDs containing an org prefix. ([#8877](https://github.com/googleapis/google-cloud-python/pull/8877))
-- Add enum with SQL type names allowed to be used in `SchemaField`. ([#9040](https://github.com/googleapis/google-cloud-python/pull/9040))
-
-### Documentation
-
-- Fix the reference URL for `Client.create_dataset()`. ([#9149](https://github.com/googleapis/google-cloud-python/pull/9149))
-- Update code samples to use strings for table names instead of `client.dataset()`. ([#9032](https://github.com/googleapis/google-cloud-python/pull/9032))
-- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035))
-- Fix Pandas DataFrame load example under Python 2.7. ([#9022](https://github.com/googleapis/google-cloud-python/pull/9022))
-
-### Internal / Testing Changes
-
-- Disable failing snippets test for copying CMEK-protected tables. ([#9156](https://github.com/googleapis/google-cloud-python/pull/9156))
-- Fix BigQuery client unit test assertions ([#9112](https://github.com/googleapis/google-cloud-python/pull/9112))
-- Replace avro with arrow schemas in `test_table.py` ([#9056](https://github.com/googleapis/google-cloud-python/pull/9056))
-
-## 1.18.0
-
-08-08-2019 12:28 PDT
-
-### New Features
-
-- Add `bqstorage_client` param to `QueryJob.to_arrow()` ([#8693](https://github.com/googleapis/google-cloud-python/pull/8693))
-- Include SQL query and job ID in exception messages. ([#8748](https://github.com/googleapis/google-cloud-python/pull/8748))
-- Allow using TableListItem to construct a Table object. ([#8738](https://github.com/googleapis/google-cloud-python/pull/8738))
-- Add StandardSqlDataTypes enum to BigQuery ([#8782](https://github.com/googleapis/google-cloud-python/pull/8782))
-- Add `to_standard_sql()` method to SchemaField ([#8880](https://github.com/googleapis/google-cloud-python/pull/8880))
-- Add debug logging statements to track when BQ Storage API is used. ([#8838](https://github.com/googleapis/google-cloud-python/pull/8838))
-- Hide error traceback in BigQuery cell magic ([#8808](https://github.com/googleapis/google-cloud-python/pull/8808))
-- Allow choice of compression when loading from dataframe ([#8938](https://github.com/googleapis/google-cloud-python/pull/8938))
-- Additional clustering metrics for BQML K-means models (via synth). ([#8945](https://github.com/googleapis/google-cloud-python/pull/8945))
-
-### Documentation
-
-- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288))
-- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705))
-- Remove redundant service account key code sample. ([#8891](https://github.com/googleapis/google-cloud-python/pull/8891))
-
-### Internal / Testing Changes
-
-- Fix several pytest "skip if" markers ([#8694](https://github.com/googleapis/google-cloud-python/pull/8694))
-- Update tests to support conversion of NaN as NULL in pyarrow `0.14.*`. ([#8785](https://github.com/googleapis/google-cloud-python/pull/8785))
-- Mock external calls in one of BigQuery unit tests ([#8727](https://github.com/googleapis/google-cloud-python/pull/8727))
-- Set IPython user agent when running queries with IPython cell magic ([#8713](https://github.com/googleapis/google-cloud-python/pull/8713))
-- Use configurable bucket name for GCS samples data in systems tests. ([#8783](https://github.com/googleapis/google-cloud-python/pull/8783))
-- Move `maybe_fail_import()` to top level test utils ([#8840](https://github.com/googleapis/google-cloud-python/pull/8840))
-- Set BQ Storage client user-agent when in Jupyter cell ([#8734](https://github.com/googleapis/google-cloud-python/pull/8734))
-
-## 1.17.0
-
-07-12-2019 07:56 PDT
-
-### New Features
-
-- Support faster Arrow data format in `to_dataframe` when using BigQuery Storage API. ([#8551](https://github.com/googleapis/google-cloud-python/pull/8551))
-- Add `to_arrow` to get a `pyarrow.Table` from query results. ([#8609](https://github.com/googleapis/google-cloud-python/pull/8609))
-
-### Dependencies
-
-- Exclude bad 0.14.0 `pyarrow` release. ([#8551](https://github.com/googleapis/google-cloud-python/pull/8551))
-
-## 1.16.0
-
-07-01-2019 10:22 PDT
-
-### New Features
-
-- Add Routines API. ([#8491](https://github.com/googleapis/google-cloud-python/pull/8491))
-- Add more stats to Models API, such as `optimization_strategy` (via synth). ([#8344](https://github.com/googleapis/google-cloud-python/pull/8344))
-
-### Documentation
-
-- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464))
-- Add sample demonstrating how to create a job. ([#8422](https://github.com/googleapis/google-cloud-python/pull/8422))
-- Use autodetected location in code samples. ([#8340](https://github.com/googleapis/google-cloud-python/pull/8340), [#8341](https://github.com/googleapis/google-cloud-python/pull/8341))
-
-### Internal / Testing Changes
-
-- Refactor `to_dataframe` to deterministically update progress bar. ([#8303](https://github.com/googleapis/google-cloud-python/pull/8303))
-
-## 1.15.0
-
-06-14-2019 10:10 PDT
-
-### Implementation Changes
-
-- Fix bug where `load_table_from_dataframe` could not append to REQUIRED fields. ([#8230](https://github.com/googleapis/google-cloud-python/pull/8230))
-
-### New Features
-
-- Add `page_size` parameter to `QueryJob.result`. ([#8206](https://github.com/googleapis/google-cloud-python/pull/8206))
-
-## 1.14.0
-
-06-04-2019 11:11 PDT
-
-
-### New Features
-- Add `maximum_bytes_billed` argument and `context.default_query_job_config` property to magics. ([#8179](https://github.com/googleapis/google-cloud-python/pull/8179))
-
-### Dependencies
-- Don't pin `google-api-core` in libs using `google-cloud-core`. ([#8213](https://github.com/googleapis/google-cloud-python/pull/8213))
-
-## 1.13.0
-
-05-31-2019 10:22 PDT
-
-### New Features
-
-- Use `job_config.schema` for data type conversion if specified in `load_table_from_dataframe`. ([#8105](https://github.com/googleapis/google-cloud-python/pull/8105))
-
-### Internal / Testing Changes
-
-- Adds private `_connection` object to magics context. ([#8192](https://github.com/googleapis/google-cloud-python/pull/8192))
-- Fix coverage in 'types.py' (via synth). ([#8146](https://github.com/googleapis/google-cloud-python/pull/8146))
-
-## 1.12.1
-
-05-21-2019 11:16 PDT
-
-### Implementation Changes
-
-- Don't raise error when encountering unknown fields in Models API. ([#8083](https://github.com/googleapis/google-cloud-python/pull/8083))
-
-### Documentation
-
-- Use alabaster theme everywhere. ([#8021](https://github.com/googleapis/google-cloud-python/pull/8021))
-
-### Internal / Testing Changes
-
-- Add empty lines (via synth). ([#8049](https://github.com/googleapis/google-cloud-python/pull/8049))
-
-## 1.12.0
-
-05-16-2019 11:25 PDT
-
-### Implementation Changes
-- Remove duplicates from index on pandas DataFrames returned by `to_dataframe()`. ([#7953](https://github.com/googleapis/google-cloud-python/pull/7953))
-- Prevent error when time partitioning is populated with empty dict ([#7904](https://github.com/googleapis/google-cloud-python/pull/7904))
-- Preserve order in `to_dataframe` with BQ Storage from queries containing `ORDER BY` ([#7793](https://github.com/googleapis/google-cloud-python/pull/7793))
-- Respect `progress_bar_type` in `to_dataframe` when used with BQ Storage API ([#7697](https://github.com/googleapis/google-cloud-python/pull/7697))
-- Refactor QueryJob.query to read from resource dictionary ([#7763](https://github.com/googleapis/google-cloud-python/pull/7763))
-- Close the `to_dataframe` progress bar when finished. ([#7757](https://github.com/googleapis/google-cloud-python/pull/7757))
-- Ensure that `KeyboardInterrupt` during `to_dataframe` no longer hangs. ([#7698](https://github.com/googleapis/google-cloud-python/pull/7698))
-- Raise ValueError when BQ Storage is required but missing ([#7726](https://github.com/googleapis/google-cloud-python/pull/7726))
-- Make `total_rows` available on RowIterator before iteration ([#7622](https://github.com/googleapis/google-cloud-python/pull/7622))
-- Avoid masking auth errors in `to_dataframe` with BQ Storage API ([#7674](https://github.com/googleapis/google-cloud-python/pull/7674))
-
-### New Features
-- Add support for passing `client_info`. ([#7849](https://github.com/googleapis/google-cloud-python/pull/7849) and [#7806](https://github.com/googleapis/google-cloud-python/pull/7806))
-- Phase 1 for storing schemas for later use. ([#7761](https://github.com/googleapis/google-cloud-python/pull/7761))
-- Add `destination` and related properties to LoadJob. ([#7710](https://github.com/googleapis/google-cloud-python/pull/7710))
-- Add `clustering_fields` property to TableListItem ([#7692](https://github.com/googleapis/google-cloud-python/pull/7692))
-- Add `created` and `expires` properties to TableListItem ([#7684](https://github.com/googleapis/google-cloud-python/pull/7684))
-
-### Dependencies
-- Pin `google-cloud-core >= 1.0.0, < 2.0dev`. ([#7993](https://github.com/googleapis/google-cloud-python/pull/7993))
-- Add `[all]` extras to install all extra dependencies ([#7610](https://github.com/googleapis/google-cloud-python/pull/7610))
-
-### Documentation
-- Move table and dataset snippets to samples/ directory ([#7683](https://github.com/googleapis/google-cloud-python/pull/7683))
-
-### Internal / Testing Changes
-- Blacken unit tests. ([#7960](https://github.com/googleapis/google-cloud-python/pull/7960))
-- Cleanup client tests with method to create minimal table resource ([#7802](https://github.com/googleapis/google-cloud-python/pull/7802))
-
-## 1.11.2
-
-04-05-2019 08:16 PDT
-
-### Dependencies
-
-- Add dependency on protobuf. ([#7668](https://github.com/googleapis/google-cloud-python/pull/7668))
-
-## 1.11.1
-
-04-04-2019 09:19 PDT
-
-### Internal / Testing Changes
-
-- Increment version number in `setup.py`.
-
-## 1.11.0
-
-04-03-2019 19:33 PDT
-
-### Implementation Changes
-
-- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535))
-
-### New Features
-
-- Enable fastparquet support by using temporary file in `load_table_from_dataframe` ([#7545](https://github.com/googleapis/google-cloud-python/pull/7545))
-- Allow string for copy sources, query destination, and default dataset ([#7560](https://github.com/googleapis/google-cloud-python/pull/7560))
-- Add `progress_bar_type` argument to `to_dataframe` to use `tqdm` to display a progress bar ([#7552](https://github.com/googleapis/google-cloud-python/pull/7552))
-- Call `get_table` in `list_rows` if the schema is not available ([#7621](https://github.com/googleapis/google-cloud-python/pull/7621))
-- Fallback to BQ API when there are problems reading from BQ Storage. ([#7633](https://github.com/googleapis/google-cloud-python/pull/7633))
-- Add methods for Models API ([#7562](https://github.com/googleapis/google-cloud-python/pull/7562))
-- Add option to use BigQuery Storage API from IPython magics ([#7640](https://github.com/googleapis/google-cloud-python/pull/7640))
-
-### Documentation
-
-- Remove typo in `Table.from_api_repr` docstring. ([#7509](https://github.com/googleapis/google-cloud-python/pull/7509))
-- Add docs session to nox configuration for BigQuery ([#7541](https://github.com/googleapis/google-cloud-python/pull/7541))
-
-### Internal / Testing Changes
-
-- Refactor `table()` methods into shared implementation. ([#7516](https://github.com/googleapis/google-cloud-python/pull/7516))
-- Blacken noxfile and setup file in nox session ([#7619](https://github.com/googleapis/google-cloud-python/pull/7619))
-- Actually use the `progress_bar_type` argument in `QueryJob.to_dataframe()`. ([#7616](https://github.com/googleapis/google-cloud-python/pull/7616))
-
-## 1.10.0
-
-03-06-2019 15:20 PST
-
-### Implementation Changes
-
-- Harden 'ArrayQueryParameter.from_api_repr' against missing 'parameterValue'. ([#7311](https://github.com/googleapis/google-cloud-python/pull/7311))
-- Allow nested records w/ null values. ([#7297](https://github.com/googleapis/google-cloud-python/pull/7297))
-
-### New Features
-
-- Add options to ignore errors when creating/deleting datasets/tables. ([#7491](https://github.com/googleapis/google-cloud-python/pull/7491))
-- Accept a string in Table and Dataset constructors. ([#7483](https://github.com/googleapis/google-cloud-python/pull/7483))
-
-### Documentation
-
-- Update docstring of RowIterator's to_dataframe ([#7306](https://github.com/googleapis/google-cloud-python/pull/7306))
-- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307))
-
-### Internal / Testing Changes
-
-- Fix lint. ([#7383](https://github.com/googleapis/google-cloud-python/pull/7383))
-
-## 1.9.0
-
-02-04-2019 13:28 PST
-
-### New Features
-
-- Add arguments to select `dtypes` and use BQ Storage API to `QueryJob.to_dataframe()`. ([#7241](https://github.com/googleapis/google-cloud-python/pull/7241))
-
-### Documentation
-
([#7217](https://github.com/googleapis/google-cloud-python/pull/7217)) - -## 1.8.1 - -12-17-2018 17:53 PST - - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Normalize docs for 'page_size' / 'max_results' / 'page_token' ([#6842](https://github.com/googleapis/google-cloud-python/pull/6842)) - -## 1.8.0 - -12-10-2018 12:39 PST - - -### Implementation Changes -- Add option to use BQ Storage API with `to_dataframe` ([#6854](https://github.com/googleapis/google-cloud-python/pull/6854)) -- Fix exception type in comment ([#6847](https://github.com/googleapis/google-cloud-python/pull/6847)) -- Add `to_bqstorage` to convert from Table[Reference] to a google-cloud-bigquery-storage reference ([#6840](https://github.com/googleapis/google-cloud-python/pull/6840)) -- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) -- Add avro logical type control for load jobs. ([#6827](https://github.com/googleapis/google-cloud-python/pull/6827)) -- Allow setting partition expiration to 'None'. ([#6823](https://github.com/googleapis/google-cloud-python/pull/6823)) -- Add `retry` argument to `_AsyncJob.result`. ([#6302](https://github.com/googleapis/google-cloud-python/pull/6302)) - -### Dependencies -- Update dependency to google-cloud-core ([#6835](https://github.com/googleapis/google-cloud-python/pull/6835)) - -### Documentation -- Add avro load samples ([#6832](https://github.com/googleapis/google-cloud-python/pull/6832)) - -### Internal / Testing Changes -- Blacken libraries ([#6794](https://github.com/googleapis/google-cloud-python/pull/6794)) -- Fix copy/paste typos in noxfile comments ([#6831](https://github.com/googleapis/google-cloud-python/pull/6831)) - -## 1.7.0 - -11-05-2018 16:41 PST - -### Implementation Changes - -- Add destination table properties to `LoadJobConfig`. ([#6202](https://github.com/googleapis/google-cloud-python/pull/6202)) -- Allow strings or references in `create_dataset` and `create_table` ([#6199](https://github.com/googleapis/google-cloud-python/pull/6199)) -- Fix swallowed error message ([#6168](https://github.com/googleapis/google-cloud-python/pull/6168)) - -### New Features - -- Add `--params` option to `%%bigquery` magic ([#6277](https://github.com/googleapis/google-cloud-python/pull/6277)) -- Expose `to_api_repr` method for jobs. ([#6176](https://github.com/googleapis/google-cloud-python/pull/6176)) -- Allow string in addition to DatasetReference / TableReference in Client methods. ([#6164](https://github.com/googleapis/google-cloud-python/pull/6164)) -- Add keyword arguments to job config constructors for setting properties ([#6397](https://github.com/googleapis/google-cloud-python/pull/6397)) - -### Documentation - -- Update README service links in quickstart guides. ([#6322](https://github.com/googleapis/google-cloud-python/pull/6322)) -- Move usage guides to their own docs. ([#6238](https://github.com/googleapis/google-cloud-python/pull/6238)) -- Normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) - -### Internal / Testing Changes - -- Deprecation cleanups ([#6304](https://github.com/googleapis/google-cloud-python/pull/6304)) -- Use `_get_sub_prop` helper so missing load stats don't raise.
([#6269](https://github.com/googleapis/google-cloud-python/pull/6269)) -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) -- Harden snippets against transient GCS errors. ([#6184](https://github.com/googleapis/google-cloud-python/pull/6184)) - -## 1.6.0 - -### New Features -- Add support for `GEOGRAPHY` type ([#6147](https://github.com/googleapis/google-cloud-python/pull/6147)) -- Add default QueryJobConfig to Client ([#6088](https://github.com/googleapis/google-cloud-python/pull/6088)) - -### Documentation -- Remove unused "append" samples ([#6100](https://github.com/googleapis/google-cloud-python/pull/6100)) - -### Internal / Testing Changes -- Address dataset leaks, conflicts in systests ([#6099](https://github.com/googleapis/google-cloud-python/pull/6099)) -- Harden bucket teardown against `429 Too Many Requests`. ([#6101](https://github.com/googleapis/google-cloud-python/pull/6101)) - -## 1.5.1 - -### Implementation Changes - -- Retry '502 Bad Gateway' errors by default. (#5930) -- Avoid pulling entire result set into memory when constructing dataframe. (#5870) -- Add support for retrying unstructured 429 / 500 / 502 responses. (#6011) -- Populate the jobReference from the API response. (#6044) - -### Documentation - -- Prepare documentation for repo split (#5955) -- Fix leakage of bigquery/spanner sections into sidebar menu. (#5986) - -### Internal / Testing Changes - -- Test pandas support under Python 3.7. (#5857) -- Nox: use inplace installs (#5865) -- Update system test to use test data in bigquery-public-data. (#5965) - -## 1.5.0 - -### Implementation Changes - -- Make 'Table.location' read-only. (#5687) - -### New Features - -- Add 'clustering_fields' properties. (#5630) -- Add support for job labels (#5654) -- Add 'QueryJob.estimated_bytes_processed' property (#5655) -- Add support/tests for loading tables from 'gzip.GzipFile'. (#5711) -- Add 'ExternalSourceFormat' enum. (#5674) -- Add default location to client (#5678) - -### Documentation - -- Fix typo in CopyJob sources docstring (#5690) - -### Internal / Testing Changes - -- Add/refactor snippets for managing BigQuery jobs (#5631) -- Reenable systests for 'dataset.update'/'table.update'. (#5732) - -## 1.4.0 - -### Implementation Changes - -- Add 'internalError' to retryable error reasons. (#5599) -- Don't raise exception if viewing CREATE VIEW DDL results (#5602) - -### New Features - -- Add Orc source format support and samples (#5500) -- Move 'DEFAULT_RETRY' (w/ its predicate) to a new public 'retry' module. (#5552) -- Allow listing rows on an empty table. (#5584) - -### Documentation - -- Add load_table_from_dataframe() to usage docs and changelog and dedent snippets in usage page (#5501) -- Add samples for query external data sources (GCS & Sheets) (#5491) -- Add BigQuery authorized view samples (#5515) -- Update docs to show pyarrow as the only dependency of load_table_from_dataframe() (#5582) - -### Internal / Testing Changes - -- Add missing explicit coverage for '_helpers' (#5550) -- Skip update_table and update_dataset tests until etag issue is resolved. (#5590) - -## 1.3.0 - -### New Features - -- NUMERIC type support (#5331) -- Add timeline and top-level slot-millis to query statistics. (#5312) -- Add additional statistics to query plan stages.
(#5307) -- Add `client.load_table_from_dataframe()` (#5387) - -### Documentation - -- Use autosummary to split up API reference docs (#5340) -- Fix typo in Client docstrings (#5342) - -### Internal / Testing Changes - -- Prune systests identified as redundant to snippets. (#5365) -- Modify system tests to use prerelease versions of grpcio (#5304) -- Improve system test performance (#5319) - -## 1.2.0 - -### Implementation Changes -- Switch `list_partitions` helper to a direct metatable read (#5273) -- Fix typo in `Encoding.ISO_8859_1` enum value (#5211) - -### New Features -- Add UnknownJob type for redacted jobs. (#5281) -- Add project parameter to `list_datasets` and `list_jobs` (#5217) -- Add from_string factory methods to Dataset and Table (#5255) -- Add column based time partitioning (#5267) - -### Documentation -- Standardize docstrings for constants (#5289) -- Fix docstring / impl of `ExtractJob.destination_uri_file_counts`. (#5245) - -### Internal / Testing Changes -- Add testing support for Python 3.7; remove testing support for Python 3.4. (#5295) - -## 1.1.0 - -### New Features -- Add `client.get_service_account_email` (#5203) - -### Documentation -- Update samples and standardize region tags (#5195) - -### Internal / Testing Changes -- Fix trove classifier to be Production/Stable -- Don't suppress 'dots' output on test (#5202) - -## 1.0.0 - -### Implementation Changes -- Remove deprecated Client methods (#5182) - -## 0.32.0 - -### :warning: Interface changes - -- Use `job.configuration` resource for XXXJobConfig classes (#5036) - -### Interface additions - -- Add `page_size` parameter for `list_rows` and use in DB-API for `arraysize` (#4931) -- Add IPython magics for running queries (#4983) - -### Documentation - -- Add job string constant parameters in init and snippets documentation (#4987) - -### Internal / Testing changes - -- Specify IPython version 5.5 when running Python 2.7 tests (#5145) -- Move all Dataset property conversion logic into properties (#5130) -- Remove unnecessary _Table class from test_job.py (#5126) -- Use explicit bytes to initialize 'BytesIO'. (#5116) -- Make SchemaField be able to include description via from_api_repr method (#5114) -- Remove _ApiResourceProperty class (#5107) -- Add dev version for 0.32.0 release (#5105) -- StringIO to BytesIO (#5101) -- Shorten snippets test name (#5091) -- Don't use `selected_fields` for listing query result rows (#5072) -- Add location property to job classes. (#5071) -- Use autospec for Connection in tests. (#5066) -- Add Parquet SourceFormat and samples (#5057) -- Remove test_load_table_from_uri_w_autodetect_schema_then_get_job because of duplicate test in snippets (#5004) -- Fix encoding variable and strings UTF-8 and ISO-8859-1 difference documentation (#4990) - -## 0.31.0 - -### Interface additions - -- Add support for `EncryptionConfiguration` (#4845) - -### Implementation changes - -- Allow listing/getting jobs even when there is an "invalid" job. (#4786) - -### Dependencies - -- The minimum version for `google-api-core` has been updated to version 1.0.0. This may cause some incompatibility with older google-cloud libraries; you will need to update those libraries if you have a dependency conflict. (#4944, #4946) - -### Documentation - -- Update format in `Table.full_table_id` and `TableListItem.full_table_id` docstrings.
(#4906) - -### Testing and internal changes - -- Install local dependencies when running lint (#4936) -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) -- Remove unnecessary debug print from tests (#4907) -- Use constant strings for job properties in tests (#4833) - -## 0.30.0 - -This is the release candidate for v1.0.0. - -### Interface changes / additions - -- Add `delete_contents` to `delete_dataset`. (#4724) - -### Bugfixes - -- Add handling of missing properties in `SchemaField.from_api_repr()`. (#4754) -- Fix missing return value in `LoadJobConfig.from_api_repr`. (#4727) - -### Documentation - -- Minor documentation and typo fixes. (#4782, #4718, #4784, #4835, #4836) - -## 0.29.0 - -### Interface changes / additions - -- Add `to_dataframe()` method to row iterators. When Pandas is installed this - method returns a `DataFrame` containing the query's or table's rows. - ([#4354](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4354)) -- Iterate over a `QueryJob` to wait for and get the query results. - ([#4350](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4350)) -- Add `Table.reference` and `Dataset.reference` properties to get the - `TableReference` or `DatasetReference` corresponding to that `Table` or - `Dataset`, respectively. - ([#4405](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4405)) -- Add `Row.keys()`, `Row.items()`, and `Row.get()`. This makes `Row` act - more like a built-in dictionary. - ([#4393](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4393), - [#4413](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4413)) - -### Interface changes / breaking changes - -- Add `Client.insert_rows()` and `Client.insert_rows_json()`, deprecate - `Client.create_rows()` and `Client.create_rows_json()`. - ([#4657](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4657)) -- Add `Client.list_tables`, deprecate `Client.list_dataset_tables`. - ([#4653](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4653)) -- `Client.list_tables` returns an iterator of `TableListItem`. The API - only returns a subset of properties of a table when listing. - ([#4427](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4427)) -- Remove `QueryJob.query_results()`. Use `QueryJob.result()` instead. - ([#4652](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4652)) -- Remove `Client.query_rows()`. Use `Client.query()` instead. - ([#4429](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4429)) -- `Client.list_datasets` returns an iterator of `DatasetListItem`. The API - only returns a subset of properties of a dataset when listing. - ([#4439](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/4439)) - -## 0.28.0 - -**0.28.0 significantly changes the interface for this package.** For examples -of the differences between 0.28.0 and previous versions, see -[Migrating to the BigQuery Python client library 0.28][2]. -These changes can be summarized as follows: - -- Query and view operations default to the standard SQL dialect. (#4192) -- Client functions related to - [jobs](https://cloud.google.com/bigquery/docs/jobs-overview), like running - queries, immediately start the job. -- Functions to create, get, update, delete datasets and tables moved to the - client class.
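-
-As a quick sketch of the interface these entries describe (assuming
-application default credentials, the standard SQL default, and pandas
-installed for `to_dataframe()`; the query and column names here are
-placeholders):
-
-```python
-from google.cloud import bigquery
-
-client = bigquery.Client()
-query_job = client.query('SELECT "Beatrice" AS name, 17 AS count')  # starts the job immediately
-
-for row in query_job:  # iterating the QueryJob waits for and fetches the results
-    # Row values are accessible by attribute, by mapping key, and via get().
-    print(row.name, row["count"], row.get("count"))
-
-# Row iterators can also materialize results as a pandas DataFrame.
-df = client.query('SELECT "Beatrice" AS name, 17 AS count').result().to_dataframe()
-```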
- -[2]: https://cloud.google.com/bigquery/docs/python-client-migration - -### Fixes - -- Populate timeout parameter correctly for queries (#4209) -- Automatically retry idempotent RPCs (#4148, #4178) -- Parse timestamps in query parameters using canonical format (#3945) -- Parse array parameters that contain a struct type. (#4040) -- Support Sub Second Datetimes in row data (#3901, #3915, #3926), h/t @page1 - -### Interface changes / additions - -- Support external table configuration (#4182) in query jobs (#4191) and - tables (#4193). -- New `Row` class allows for access by integer index like a tuple, string - index like a dictionary, or attribute access like an object. (#4149) -- Add option for job ID generation with user-supplied prefix (#4198) -- Add support for update of dataset access entries (#4197) -- Add support for atomic read-modify-write of a dataset using etag (#4052) -- Add support for labels to `Dataset` (#4026) -- Add support for labels to `Table` (#4207) -- Add `Table.streaming_buffer` property (#4161) -- Add `TableReference` class (#3942) -- Add `DatasetReference` class (#3938, #3942, #3993) -- Add `ExtractJob.destination_uri_file_counts` property. (#3803) -- Add `client.create_rows_json()` to bypass conversions on streaming writes. - (#4189) -- Add `client.get_job()` to get arbitrary jobs. (#3804, #4213) -- Add filter to `client.list_datasets()` (#4205) -- Add `QueryJob.undeclared_query_parameters` property. (#3802) -- Add `QueryJob.referenced_tables` property. (#3801) -- Add new scalar statistics properties to `QueryJob` (#3800) -- Add `QueryJob.query_plan` property. (#3799) - -### Interface changes / breaking changes - -- Remove `client.run_async_query()`, use `client.query()` instead. (#4130) -- Remove `client.run_sync_query()`, use `client.query_rows()` instead. (#4065, #4248) -- Make `QueryResults` read-only. (#4094, #4144) -- Make `get_query_results` private. Return rows for `QueryJob.result()` (#3883) -- Move `*QueryParameter` and `UDFResource` classes to `query` module (also - exposed in `bigquery` module). (#4156) - -#### Changes to tables - -- Remove `client` from `Table` class (#4159) -- Remove `table.exists()` (#4145) -- Move `table.list_partitions` to `client.list_partitions` (#4146) -- Move `table.upload_from_file` to `client.load_table_from_file` (#4136) -- Move `table.update()` and `table.patch()` to `client.update_table()` (#4076) -- Move `table.insert_data()` to `client.create_rows()`. Automatically - generates row IDs if not supplied. (#4151, #4173) -- Move `table.fetch_data()` to `client.list_rows()` (#4119, #4143) -- Move `table.delete()` to `client.delete_table()` (#4066) -- Move `table.create()` to `client.create_table()` (#4038, #4043) -- Move `table.reload()` to `client.get_table()` (#4004) -- Rename `Table.name` attribute to `Table.table_id` (#3959) -- `Table` constructor takes a `TableReference` as parameter (#3997) - -#### Changes to datasets - -- Remove `client` from `Dataset` class (#4018) -- Remove `dataset.exists()` (#3996) -- Move `dataset.list_tables()` to `client.list_dataset_tables()` (#4013) -- Move `dataset.delete()` to `client.delete_dataset()` (#4012) -- Move `dataset.patch()` and `dataset.update()` to `client.update_dataset()` (#4003) -- Move `dataset.create()` to `client.create_dataset()` (#3982) -- Move `dataset.reload()` to `client.get_dataset()` (#3973) -- Rename `Dataset.name` attribute to `Dataset.dataset_id` (#3955) -- `client.dataset()` returns a `DatasetReference` instead of `Dataset`.
(#3944) -- Rename class: `dataset.AccessGrant -> dataset.AccessEntry`. (#3798) -- `dataset.table()` returns a `TableReference` instead of a `Table` (#4014) -- `Dataset` constructor takes a DatasetReference (#4036) - -#### Changes to jobs - -- Make `job.begin()` method private. (#4242) -- Add `LoadJobConfig` class and modify `LoadJob` (#4103, #4137) -- Add `CopyJobConfig` class and modify `CopyJob` (#4051, #4059) -- Type of Job's and Query's `default_dataset` changed from `Dataset` to - `DatasetReference` (#4037) -- Rename `client.load_table_from_storage()` to `client.load_table_from_uri()` - (#4235) -- Rename `client.extract_table_to_storage` to `client.extract_table()`. - Method starts the extract job immediately. (#3991, #4177) -- Rename `XJob.name` to `XJob.job_id`. (#3962) -- Rename job classes. `LoadTableFromStorageJob -> LoadJob` and - `ExtractTableToStorageJob -> jobs.ExtractJob` (#3797) - -### Dependencies - -- Updating to `google-cloud-core ~= 0.28`, in particular, the - `google-api-core` package has been moved out of `google-cloud-core`. (#4221) - -PyPI: https://pypi.org/project/google-cloud-bigquery/0.28.0/ - - -## 0.27.0 - -- Remove client-side enum validation. (#3735) -- Add `Table.row_from_mapping` helper. (#3425) -- Move `google.cloud.future` to `google.api.core` (#3764) -- Fix `__eq__` and `__ne__`. (#3765) -- Move `google.cloud.iterator` to `google.api.core.page_iterator` (#3770) -- `nullMarker` support for BigQuery Load Jobs (#3777), h/t @leondealmeida -- Allow `job_id` to be explicitly specified in DB-API. (#3779) -- Add support for a custom null marker. (#3776) -- Add `SchemaField` serialization and deserialization. (#3786) -- Add `get_query_results` method to the client. (#3838) -- Poll for query completion via `getQueryResults` method. (#3844) -- Allow fetching more than the first page when `max_results` is set. (#3845) - -PyPI: https://pypi.org/project/google-cloud-bigquery/0.27.0/ - -## 0.26.0 - -### Notable implementation changes - -- Using the `requests` transport attached to a Client for resumable media - (i.e. downloads and uploads) (#3705) (this relates to the `httplib2` to - `requests` switch) - -### Interface changes / additions - -- Adding `autodetect` property on `LoadTableFromStorageJob` to enable schema - autodetection. (#3648) -- Implementing the Python Futures interface for Jobs. Call `job.result()` to - wait for jobs to complete instead of polling manually on the job status. - (#3626) -- Adding `is_nullable` property on `SchemaField`. Can be used to check if a - column is nullable. (#3620) -- `job_name` argument added to `Table.upload_from_file` for setting the job - ID. (#3605) -- Adding `google.cloud.bigquery.dbapi` package, which implements PEP-249 - DB-API specification. (#2921) -- Adding `Table.view_use_legacy_sql` property. Can be used to create views - with legacy or standard SQL. (#3514) - -### Interface changes / breaking changes - -- Removing `results()` method from the `QueryJob` class. Use - `query_results()` instead. (#3661) -- `SchemaField` is now immutable. It is also hashable so that it can be used - in sets. (#3601) - -### Dependencies - -- Updating to `google-cloud-core ~= 0.26`, in particular, the underlying HTTP - transport switched from `httplib2` to `requests` (#3654, #3674) -- Adding dependency on `google-resumable-media` for loading BigQuery tables - from local files. (#3555) - -### Packaging - -- Fix inclusion of `tests` (vs.
`unit_tests`) in `MANIFEST.in` (#3552) -- Updating `author_email` in `setup.py` to `googleapis-publisher@google.com`. - (#3598) - -PyPI: https://pypi.org/project/google-cloud-bigquery/0.26.0/ diff --git a/bigquery/LICENSE b/bigquery/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/bigquery/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. 
We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/bigquery/MANIFEST.in b/bigquery/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/bigquery/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/bigquery/README.rst b/bigquery/README.rst deleted file mode 100644 index 8f73576d6f7d..000000000000 --- a/bigquery/README.rst +++ /dev/null @@ -1,104 +0,0 @@ -Python Client for Google BigQuery -================================= - -|GA| |pypi| |versions| - -Querying massive datasets can be time consuming and expensive without the -right hardware and infrastructure. Google `BigQuery`_ solves this problem by -enabling super-fast, SQL queries against append-mostly tables, using the -processing power of Google's infrastructure. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg - :target: https://pypi.org/project/google-cloud-bigquery/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery.svg - :target: https://pypi.org/project/google-cloud-bigquery/ -.. _BigQuery: https://cloud.google.com/bigquery/what-is-bigquery -.. _Client Library Documentation: https://googleapis.dev/python/bigquery/latest -.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/v2/ - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Google Cloud BigQuery API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Google Cloud BigQuery API.: https://cloud.google.com/bigquery -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install google-cloud-bigquery - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install google-cloud-bigquery - -Example Usage ------------- - -Perform a query -~~~~~~~~~~~~~~~ - -.. code:: python - - from google.cloud import bigquery - - client = bigquery.Client() - - # Perform a query. - QUERY = ( - 'SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` ' - 'WHERE state = "TX" ' - 'LIMIT 100') - query_job = client.query(QUERY) # API request - rows = query_job.result() # Waits for query to finish - - for row in rows: - print(row.name) diff --git a/bigquery/benchmark/README.md b/bigquery/benchmark/README.md deleted file mode 100644 index 435926acb045..000000000000 --- a/bigquery/benchmark/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# BigQuery Benchmark -This directory contains benchmarks for the BigQuery client. - -## Usage -`python benchmark.py queries.json` - -The BigQuery service caches requests, so the benchmark should be run -at least twice, disregarding the first result. diff --git a/bigquery/benchmark/benchmark.py b/bigquery/benchmark/benchmark.py deleted file mode 100644 index 2917f169aba1..000000000000 --- a/bigquery/benchmark/benchmark.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
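-# -# A simple latency harness: it reads a JSON list of SQL query strings (see -# queries.json below), runs each query, and reports row and column counts, -# time to the first row, and total elapsed time per query.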
- -from google.cloud import bigquery -from datetime import datetime -import json -import sys - -if len(sys.argv) < 2: - raise Exception('need query file, usage: python {0} <queries.json>'.format(sys.argv[0])) - -with open(sys.argv[1], 'r') as f: - queries = json.loads(f.read()) - -client = bigquery.Client() - -for query in queries: - start_time = datetime.now() - job = client.query(query) - rows = job.result() - - num_rows = 0 - num_cols = None - first_byte_time = None - - for row in rows: - if num_rows == 0: - num_cols = len(row) - first_byte_time = datetime.now() - start_time - elif num_cols != len(row): - raise Exception('found {0} columns, expected {1}'.format(len(row), num_cols)) - num_rows += 1 - total_time = datetime.now() - start_time - print("query {0}: {1} rows, {2} cols, first byte {3} sec, total {4} sec" - .format(query, num_rows, num_cols, first_byte_time.total_seconds(), total_time.total_seconds())) diff --git a/bigquery/benchmark/queries.json b/bigquery/benchmark/queries.json deleted file mode 100644 index 13fed38b52b3..000000000000 --- a/bigquery/benchmark/queries.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 10000", - "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 100000", - "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 1000000", - "SELECT title FROM `bigquery-public-data.samples.wikipedia` ORDER BY title LIMIT 1000", - "SELECT title, id, timestamp, contributor_ip FROM `bigquery-public-data.samples.wikipedia` WHERE title like 'Blo%' ORDER BY id", - "SELECT * FROM `bigquery-public-data.baseball.games_post_wide` ORDER BY gameId", - "SELECT * FROM `bigquery-public-data.samples.github_nested` WHERE repository.has_downloads ORDER BY repository.created_at LIMIT 10000", - "SELECT repo_name, path FROM `bigquery-public-data.github_repos.files` WHERE path LIKE '%.java' ORDER BY id LIMIT 1000000" -] diff --git a/bigquery/docs/.gitignore b/bigquery/docs/.gitignore deleted file mode 100644 index 3fe20bec0f3a..000000000000 --- a/bigquery/docs/.gitignore +++ /dev/null @@ -1 +0,0 @@ -generated/ \ No newline at end of file diff --git a/bigquery/docs/README.rst b/bigquery/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/bigquery/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/bigquery/docs/_static/custom.css b/bigquery/docs/_static/custom.css deleted file mode 100644 index 9a6f9f8ddc3a..000000000000 --- a/bigquery/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/bigquery/docs/_templates/layout.html b/bigquery/docs/_templates/layout.html deleted file mode 100644 index de457b2c2767..000000000000 --- a/bigquery/docs/_templates/layout.html +++ /dev/null @@ -1,49 +0,0 @@ -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
<div class="document"> - {{ sidebar() }} - {%- block document %} - <div class="documentwrapper"> - {%- if render_sidebar %} - <div class="bodywrapper"> - {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - <div class="related top"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - - <div class="body" role="main"> - <div class="admonition" id="python2-eol"> - On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit Python 2 support on Google Cloud. - </div> - {% block body %} {% endblock %} - </div> - - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - <div class="related bottom"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} - </div> - {%- endif %} - </div> - {%- endblock %} - <div class="clearer"></div> - </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/bigquery/docs/changelog.md b/bigquery/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/bigquery/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/bigquery/docs/conf.py b/bigquery/docs/conf.py deleted file mode 100644 index 1b83501d1417..000000000000 --- a/bigquery/docs/conf.py +++ /dev/null @@ -1,363 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# google-cloud-bigquery documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shutil - -from sphinx.util import logging - -logger = logging.getLogger(__name__) - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates", os.path.join("..", "..", "docs", "_templates")] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery" -copyright = u"2015, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx.
Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-doc" - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-bigquery.tex", - u"google-cloud-bigquery Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-bigquery", - u"google-cloud-bigquery Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-bigquery", - u"google-cloud-bigquery Documentation", - author, - "google-cloud-bigquery", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), - "python": ("http://python.readthedocs.org/en/latest/", None), -} - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True - -# Static HTML pages, e.g. to support redirects -# See: https://tech.signavio.com/2017/managing-sphinx-redirects -# HTML pages to be copied from source to target -static_html_pages = ["usage.html", "generated/google.cloud.bigquery.magics.html"] - - -def copy_static_html_pages(app, exception): - if exception is None and app.builder.name == "html": - for static_html_page in static_html_pages: - target_path = app.outdir + "/" + static_html_page - src_path = app.srcdir + "/" + static_html_page - if os.path.isfile(src_path): - logger.info("Copying static html: %s -> %s", src_path, target_path) - shutil.copyfile(src_path, target_path) - - -def setup(app): - app.connect("build-finished", copy_static_html_pages) diff --git a/bigquery/docs/dbapi.rst b/bigquery/docs/dbapi.rst deleted file mode 100644 index ca0256d3c8de..000000000000 --- a/bigquery/docs/dbapi.rst +++ /dev/null @@ -1,6 +0,0 @@ -DB-API Reference -~~~~~~~~~~~~~~~~ - -.. automodule:: google.cloud.bigquery.dbapi - :members: - :show-inheritance: diff --git a/bigquery/docs/gapic/v2/enums.rst b/bigquery/docs/gapic/v2/enums.rst deleted file mode 100644 index 0e0f05adaea5..000000000000 --- a/bigquery/docs/gapic/v2/enums.rst +++ /dev/null @@ -1,8 +0,0 @@ -Enums for BigQuery API Client -============================= - -.. autoclass:: google.cloud.bigquery_v2.gapic.enums.Model - :members: - -.. autoclass:: google.cloud.bigquery_v2.gapic.enums.StandardSqlDataType - :members: diff --git a/bigquery/docs/gapic/v2/types.rst b/bigquery/docs/gapic/v2/types.rst deleted file mode 100644 index 97938768a690..000000000000 --- a/bigquery/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for BigQuery API Client -============================= - -.. 
automodule:: google.cloud.bigquery_v2.types - :members: \ No newline at end of file diff --git a/bigquery/docs/generated/google.cloud.bigquery.magics.html b/bigquery/docs/generated/google.cloud.bigquery.magics.html deleted file mode 100644 index 0d2a00fa14c9..000000000000 --- a/bigquery/docs/generated/google.cloud.bigquery.magics.html +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - diff --git a/bigquery/docs/index.rst b/bigquery/docs/index.rst deleted file mode 100644 index ec360de69770..000000000000 --- a/bigquery/docs/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. include:: README.rst - -More Examples -~~~~~~~~~~~~~ - -.. toctree:: - :maxdepth: 2 - - usage/index - Official Google BigQuery How-to Guides - -API Reference -------------- - -.. toctree:: - :maxdepth: 2 - - reference - dbapi - -Changelog ---------- - -For a list of all ``google-cloud-bigquery`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog diff --git a/bigquery/docs/magics.rst b/bigquery/docs/magics.rst deleted file mode 100644 index 732c27af94dd..000000000000 --- a/bigquery/docs/magics.rst +++ /dev/null @@ -1,5 +0,0 @@ -IPython Magics for BigQuery -=========================== - -.. automodule:: google.cloud.bigquery.magics - :members: diff --git a/bigquery/docs/reference.rst b/bigquery/docs/reference.rst deleted file mode 100644 index 981059de5226..000000000000 --- a/bigquery/docs/reference.rst +++ /dev/null @@ -1,194 +0,0 @@ -API Reference -~~~~~~~~~~~~~ - -.. currentmodule:: google.cloud.bigquery - -The main concepts with this API are: - -- :class:`~google.cloud.bigquery.client.Client` manages connections to the - BigQuery API. Use the client methods to run jobs (such as a - :class:`~google.cloud.bigquery.job.QueryJob` via - :meth:`~google.cloud.bigquery.client.Client.query`) and manage resources. - -- :class:`~google.cloud.bigquery.dataset.Dataset` represents a - collection of tables. - -- :class:`~google.cloud.bigquery.table.Table` represents a single "relation". - -Client -====== - -.. autosummary:: - :toctree: generated - - client.Client - -Job -=== - -Job Configuration ------------------ - -.. autosummary:: - :toctree: generated - - job.QueryJobConfig - job.CopyJobConfig - job.LoadJobConfig - job.ExtractJobConfig - -Job Classes ------------ - -.. autosummary:: - :toctree: generated - - job.QueryJob - job.CopyJob - job.LoadJob - job.ExtractJob - job.UnknownJob - -Job-Related Types ------------------ - -.. autosummary:: - :toctree: generated - - job.Compression - job.CreateDisposition - job.DestinationFormat - job.Encoding - job.QueryPriority - job.SourceFormat - job.WriteDisposition - job.SchemaUpdateOption - - -Dataset -======= - -.. autosummary:: - :toctree: generated - - dataset.Dataset - dataset.DatasetListItem - dataset.DatasetReference - dataset.AccessEntry - - -Table -===== - -.. autosummary:: - :toctree: generated - - table.PartitionRange - table.RangePartitioning - table.Row - table.RowIterator - table.Table - table.TableListItem - table.TableReference - table.TimePartitioning - table.TimePartitioningType - -Model -===== - -.. autosummary:: - :toctree: generated - - model.Model - model.ModelReference - -Routine -======= - -.. autosummary:: - :toctree: generated - - routine.Routine - routine.RoutineArgument - routine.RoutineReference - -Schema -====== - -.. autosummary:: - :toctree: generated - - schema.SchemaField - - -Query -===== - -.. autosummary:: - :toctree: generated - - query.ArrayQueryParameter - query.ScalarQueryParameter - query.StructQueryParameter - query.UDFResource - - -Retries -======= - -.. 
autosummary:: - :toctree: generated - - retry.DEFAULT_RETRY - - -External Configuration -====================== - -.. autosummary:: - :toctree: generated - - external_config.ExternalSourceFormat - external_config.ExternalConfig - external_config.BigtableOptions - external_config.BigtableColumnFamily - external_config.BigtableColumn - external_config.CSVOptions - external_config.GoogleSheetsOptions - - -Magics -====== - -.. toctree:: - :maxdepth: 2 - - magics - - -Enums -===== - -.. autosummary:: - :toctree: generated - - enums.StandardSqlDataTypes - -Encryption Configuration -======================== - -.. autosummary:: - :toctree: generated - - encryption_configuration.EncryptionConfiguration - -Additional Types -================ - -Protocol buffer classes for working with the Models API. - -.. toctree:: - :maxdepth: 2 - - gapic/v2/enums - gapic/v2/types diff --git a/bigquery/docs/samples b/bigquery/docs/samples deleted file mode 120000 index 18cd9a30aaff..000000000000 --- a/bigquery/docs/samples +++ /dev/null @@ -1 +0,0 @@ -../samples/ \ No newline at end of file diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py deleted file mode 100644 index 4981a1e18100..000000000000 --- a/bigquery/docs/snippets.py +++ /dev/null @@ -1,1195 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Testable usage examples for Google BigQuery API wrapper -Each example function takes a ``client`` argument (which must be an instance -of :class:`google.cloud.bigquery.client.Client`) and uses it to perform a task -with the API. -To facilitate running the examples as system tests, each example is also passed -a ``to_delete`` list; the function adds to the list any objects created which -need to be deleted during teardown. 
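A typical snippet therefore looks like this (an illustrative sketch only;
``client`` and ``to_delete`` come from the pytest fixtures defined below):

    def example_create_dataset(client, to_delete):
        dataset = bigquery.Dataset(client.dataset("my_example_dataset"))
        dataset = client.create_dataset(dataset)  # API request
        to_delete.append(dataset)  # removed again by the fixture at teardown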
-""" - -import os -import time - -import pytest -import six - -try: - import fastparquet -except (ImportError, AttributeError): - fastparquet = None -try: - import pandas -except (ImportError, AttributeError): - pandas = None -try: - import pyarrow -except (ImportError, AttributeError): - pyarrow = None - -from google.api_core.exceptions import InternalServerError -from google.api_core.exceptions import ServiceUnavailable -from google.api_core.exceptions import TooManyRequests -from google.cloud import bigquery -from google.cloud import storage -from test_utils.retry import RetryErrors - -ORIGINAL_FRIENDLY_NAME = "Original friendly name" -ORIGINAL_DESCRIPTION = "Original description" -LOCALLY_CHANGED_FRIENDLY_NAME = "Locally-changed friendly name" -LOCALLY_CHANGED_DESCRIPTION = "Locally-changed description" -UPDATED_FRIENDLY_NAME = "Updated friendly name" -UPDATED_DESCRIPTION = "Updated description" - -SCHEMA = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), -] - -ROWS = [ - ("Phred Phlyntstone", 32), - ("Bharney Rhubble", 33), - ("Wylma Phlyntstone", 29), - ("Bhettye Rhubble", 27), -] - -QUERY = ( - "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " - 'WHERE state = "TX"' -) - - -retry_429 = RetryErrors(TooManyRequests) -retry_storage_errors = RetryErrors( - (TooManyRequests, InternalServerError, ServiceUnavailable) -) - - -@pytest.fixture(scope="module") -def client(): - return bigquery.Client() - - -@pytest.fixture -def to_delete(client): - doomed = [] - yield doomed - for item in doomed: - if isinstance(item, (bigquery.Dataset, bigquery.DatasetReference)): - retry_429(client.delete_dataset)(item, delete_contents=True) - elif isinstance(item, storage.Bucket): - retry_storage_errors(item.delete)() - else: - retry_429(item.delete)() - - -def _millis(): - return int(time.time() * 1000) - - -class _CloseOnDelete(object): - def __init__(self, wrapped): - self._wrapped = wrapped - - def delete(self): - self._wrapped.close() - - -def test_create_client_default_credentials(): - """Create a BigQuery client with Application Default Credentials""" - - # [START bigquery_client_default_credentials] - from google.cloud import bigquery - - # If you don't specify credentials when constructing the client, the - # client library will look for credentials in the environment. 
- client = bigquery.Client() - # [END bigquery_client_default_credentials] - - assert client is not None - - -def test_create_table_nested_repeated_schema(client, to_delete): - dataset_id = "create_table_nested_repeated_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_nested_repeated_schema] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - - schema = [ - bigquery.SchemaField("id", "STRING", mode="NULLABLE"), - bigquery.SchemaField("first_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("last_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("dob", "DATE", mode="NULLABLE"), - bigquery.SchemaField( - "addresses", - "RECORD", - mode="REPEATED", - fields=[ - bigquery.SchemaField("status", "STRING", mode="NULLABLE"), - bigquery.SchemaField("address", "STRING", mode="NULLABLE"), - bigquery.SchemaField("city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("numberOfYears", "STRING", mode="NULLABLE"), - ], - ), - ] - table_ref = dataset_ref.table("my_table") - table = bigquery.Table(table_ref, schema=schema) - table = client.create_table(table) # API request - - print("Created table {}".format(table.full_table_id)) - # [END bigquery_nested_repeated_schema] - - -def test_create_table_cmek(client, to_delete): - dataset_id = "create_table_cmek_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_create_table_cmek] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - - table_ref = client.dataset(dataset_id).table("my_table") - table = bigquery.Table(table_ref) - - # Set the encryption key to use for the table. - # TODO: Replace this key with a key you have created in Cloud KMS. 
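    # A fully qualified Cloud KMS key name has the form:
    #   projects/<PROJECT>/locations/<LOCATION>/keyRings/<RING>/cryptoKeys/<KEY>
    # The values below point at a shared test key, not a production key.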
- kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us", "test", "test" - ) - table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name - ) - - table = client.create_table(table) # API request - - assert table.encryption_configuration.kms_key_name == kms_key_name - # [END bigquery_create_table_cmek] - - -def test_create_partitioned_table(client, to_delete): - dataset_id = "create_table_partitioned_{}".format(_millis()) - dataset_ref = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset_ref) - to_delete.append(dataset) - - # [START bigquery_create_table_partitioned] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - - table_ref = dataset_ref.table("my_partitioned_table") - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - bigquery.SchemaField("date", "DATE"), - ] - table = bigquery.Table(table_ref, schema=schema) - table.time_partitioning = bigquery.TimePartitioning( - type_=bigquery.TimePartitioningType.DAY, - field="date", # name of column to use for partitioning - expiration_ms=7776000000, - ) # 90 days - - table = client.create_table(table) - - print( - "Created table {}, partitioned on column {}".format( - table.table_id, table.time_partitioning.field - ) - ) - # [END bigquery_create_table_partitioned] - - assert table.time_partitioning.type_ == "DAY" - assert table.time_partitioning.field == "date" - assert table.time_partitioning.expiration_ms == 7776000000 - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_manage_table_labels(client, to_delete): - dataset_id = "label_table_dataset_{}".format(_millis()) - table_id = "label_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table = client.create_table(table) - - # [START bigquery_label_table] - # from google.cloud import bigquery - # client = bigquery.Client() - # table_ref = client.dataset('my_dataset').table('my_table') - # table = client.get_table(table_ref) # API request - - assert table.labels == {} - labels = {"color": "green"} - table.labels = labels - - table = client.update_table(table, ["labels"]) # API request - - assert table.labels == labels - # [END bigquery_label_table] - - # [START bigquery_get_table_labels] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - # table_id = 'my_table' - - dataset_ref = client.dataset(dataset_id) - table_ref = dataset_ref.table(table_id) - table = client.get_table(table_ref) # API Request - - # View table labels - print("Table ID: {}".format(table_id)) - print("Labels:") - if table.labels: - for label, value in table.labels.items(): - print("\t{}: {}".format(label, value)) - else: - print("\tTable has no labels defined.") - # [END bigquery_get_table_labels] - assert table.labels == labels - - # [START bigquery_delete_label_table] - # from google.cloud import bigquery - # client = bigquery.Client() - # table_ref = client.dataset('my_dataset').table('my_table') - # table = client.get_table(table_ref) # API request - - # This example table starts with one label - assert table.labels == {"color": "green"} - # To delete a label from a table, 
set its value to None - table.labels["color"] = None - - table = client.update_table(table, ["labels"]) # API request - - assert table.labels == {} - # [END bigquery_delete_label_table] - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_update_table_description(client, to_delete): - """Update a table's description.""" - dataset_id = "update_table_description_dataset_{}".format(_millis()) - table_id = "update_table_description_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table.description = "Original description." - table = client.create_table(table) - - # [START bigquery_update_table_description] - # from google.cloud import bigquery - # client = bigquery.Client() - # table_ref = client.dataset('my_dataset').table('my_table') - # table = client.get_table(table_ref) # API request - - assert table.description == "Original description." - table.description = "Updated description." - - table = client.update_table(table, ["description"]) # API request - - assert table.description == "Updated description." - # [END bigquery_update_table_description] - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_update_table_expiration(client, to_delete): - """Update a table's expiration time.""" - dataset_id = "update_table_expiration_dataset_{}".format(_millis()) - table_id = "update_table_expiration_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table = client.create_table(table) - - # [START bigquery_update_table_expiration] - import datetime - import pytz - - # from google.cloud import bigquery - # client = bigquery.Client() - # table_ref = client.dataset('my_dataset').table('my_table') - # table = client.get_table(table_ref) # API request - - assert table.expires is None - - # set table to expire 5 days from now - expiration = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=5) - table.expires = expiration - table = client.update_table(table, ["expires"]) # API request - - # expiration is stored in milliseconds - margin = datetime.timedelta(microseconds=1000) - assert expiration - margin <= table.expires <= expiration + margin - # [END bigquery_update_table_expiration] - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_relax_column(client, to_delete): - """Updates a schema field from required to nullable.""" - dataset_id = "relax_column_dataset_{}".format(_millis()) - table_id = "relax_column_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_relax_column] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - # table_id = 'my_table' - - original_schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - table_ref = client.dataset(dataset_id).table(table_id) - table = bigquery.Table(table_ref, 
schema=original_schema) - table = client.create_table(table) - assert all(field.mode == "REQUIRED" for field in table.schema) - - # SchemaField properties cannot be edited after initialization. - # To make changes, construct new SchemaField objects. - relaxed_schema = [ - bigquery.SchemaField("full_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), - ] - table.schema = relaxed_schema - table = client.update_table(table, ["schema"]) - - assert all(field.mode == "NULLABLE" for field in table.schema) - # [END bigquery_relax_column] - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_update_table_cmek(client, to_delete): - """Patch a table's metadata.""" - dataset_id = "update_table_cmek_{}".format(_millis()) - table_id = "update_table_cmek_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id)) - original_kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us", "test", "test" - ) - table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=original_kms_key_name - ) - table = client.create_table(table) - - # [START bigquery_update_table_cmek] - # from google.cloud import bigquery - # client = bigquery.Client() - - assert table.encryption_configuration.kms_key_name == original_kms_key_name - - # Set a new encryption key to use for the destination. - # TODO: Replace this key with a key you have created in KMS. - updated_kms_key_name = ( - "projects/cloud-samples-tests/locations/us/keyRings/test/cryptoKeys/otherkey" - ) - table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=updated_kms_key_name - ) - - table = client.update_table(table, ["encryption_configuration"]) # API request - - assert table.encryption_configuration.kms_key_name == updated_kms_key_name - assert original_kms_key_name != updated_kms_key_name - # [END bigquery_update_table_cmek] - - -@pytest.mark.skip( - reason=( - "update_table() is flaky " - "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" - ) -) -def test_manage_views(client, to_delete): - project = client.project - source_dataset_id = "source_dataset_{}".format(_millis()) - source_dataset_ref = client.dataset(source_dataset_id) - source_dataset = bigquery.Dataset(source_dataset_ref) - source_dataset = client.create_dataset(source_dataset) - to_delete.append(source_dataset) - - job_config = bigquery.LoadJobConfig() - job_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - job_config.skip_leading_rows = 1 - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - source_table_id = "us_states" - load_job = client.load_table_from_uri( - uri, source_dataset.table(source_table_id), job_config=job_config - ) - load_job.result() - - shared_dataset_id = "shared_dataset_{}".format(_millis()) - shared_dataset_ref = client.dataset(shared_dataset_id) - shared_dataset = bigquery.Dataset(shared_dataset_ref) - shared_dataset = client.create_dataset(shared_dataset) - to_delete.append(shared_dataset) - - # [START bigquery_create_view] - # from google.cloud import bigquery - # client = bigquery.Client() - # project = 'my-project' - # source_dataset_id = 'my_source_dataset' - # source_table_id = 'us_states' - # 
shared_dataset_ref = client.dataset('my_shared_dataset') - - # This example shows how to create a shared view of a source table of - # US States. The source table contains all 50 states, while the view will - # contain only states with names starting with 'W'. - view_ref = shared_dataset_ref.table("my_shared_view") - view = bigquery.Table(view_ref) - sql_template = 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "W%"' - view.view_query = sql_template.format(project, source_dataset_id, source_table_id) - view = client.create_table(view) # API request - - print("Successfully created view at {}".format(view.full_table_id)) - # [END bigquery_create_view] - - # [START bigquery_update_view_query] - # from google.cloud import bigquery - # client = bigquery.Client() - # project = 'my-project' - # source_dataset_id = 'my_source_dataset' - # source_table_id = 'us_states' - # shared_dataset_ref = client.dataset('my_shared_dataset') - - # This example shows how to update a shared view of a source table of - # US States. The view's query will be updated to contain only states with - # names starting with 'M'. - view_ref = shared_dataset_ref.table("my_shared_view") - view = bigquery.Table(view_ref) - sql_template = 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "M%"' - view.view_query = sql_template.format(project, source_dataset_id, source_table_id) - view = client.update_table(view, ["view_query"]) # API request - # [END bigquery_update_view_query] - - # [START bigquery_get_view] - # from google.cloud import bigquery - # client = bigquery.Client() - # shared_dataset_id = 'my_shared_dataset' - - view_ref = client.dataset(shared_dataset_id).table("my_shared_view") - view = client.get_table(view_ref) # API Request - - # Display view properties - print("View at {}".format(view.full_table_id)) - print("View Query:\n{}".format(view.view_query)) - # [END bigquery_get_view] - assert view.view_query is not None - - analyst_group_email = "example-analyst-group@google.com" - # [START bigquery_grant_view_access] - # from google.cloud import bigquery - # client = bigquery.Client() - - # Assign access controls to the dataset containing the view - # shared_dataset_id = 'my_shared_dataset' - # analyst_group_email = 'data_analysts@example.com' - shared_dataset = client.get_dataset( - client.dataset(shared_dataset_id) - ) # API request - access_entries = shared_dataset.access_entries - access_entries.append( - bigquery.AccessEntry("READER", "groupByEmail", analyst_group_email) - ) - shared_dataset.access_entries = access_entries - shared_dataset = client.update_dataset( - shared_dataset, ["access_entries"] - ) # API request - - # Authorize the view to access the source dataset - # project = 'my-project' - # source_dataset_id = 'my_source_dataset' - source_dataset = client.get_dataset( - client.dataset(source_dataset_id) - ) # API request - view_reference = { - "projectId": project, - "datasetId": shared_dataset_id, - "tableId": "my_shared_view", - } - access_entries = source_dataset.access_entries - access_entries.append(bigquery.AccessEntry(None, "view", view_reference)) - source_dataset.access_entries = access_entries - source_dataset = client.update_dataset( - source_dataset, ["access_entries"] - ) # API request - # [END bigquery_grant_view_access] - - -def test_load_table_from_uri_autodetect(client, to_delete, capsys): - """Load table from a GCS URI using various formats and auto-detected schema - Each file format has its own tested load from URI sample. 
Because most of - the code is common for autodetect, append, and truncate, this sample - includes snippets for all supported formats but only calls a single load - job. - This code snippet is made up of shared code, then format-specific code, - followed by more shared code. Note that only the last format in the - format-specific code section will be tested in this test. - """ - dataset_id = "load_table_from_uri_auto_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # Shared code - # [START bigquery_load_table_gcs_csv_autodetect] - # [START bigquery_load_table_gcs_json_autodetect] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - - dataset_ref = client.dataset(dataset_id) - job_config = bigquery.LoadJobConfig() - job_config.autodetect = True - # [END bigquery_load_table_gcs_csv_autodetect] - # [END bigquery_load_table_gcs_json_autodetect] - - # Format-specific code - # [START bigquery_load_table_gcs_csv_autodetect] - job_config.skip_leading_rows = 1 - # The source format defaults to CSV, so the line below is optional. - job_config.source_format = bigquery.SourceFormat.CSV - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - # [END bigquery_load_table_gcs_csv_autodetect] - # unset csv-specific attribute - del job_config._properties["load"]["skipLeadingRows"] - - # [START bigquery_load_table_gcs_json_autodetect] - job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" - # [END bigquery_load_table_gcs_json_autodetect] - - # Shared code - # [START bigquery_load_table_gcs_csv_autodetect] - # [START bigquery_load_table_gcs_json_autodetect] - load_job = client.load_table_from_uri( - uri, dataset_ref.table("us_states"), job_config=job_config - ) # API request - print("Starting job {}".format(load_job.job_id)) - - load_job.result() # Waits for table load to complete. - print("Job finished.") - - destination_table = client.get_table(dataset_ref.table("us_states")) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_csv_autodetect] - # [END bigquery_load_table_gcs_json_autodetect] - - out, _ = capsys.readouterr() - assert "Loaded 50 rows." in out - - -def test_load_table_from_uri_truncate(client, to_delete, capsys): - """Replaces table data with data from a GCS URI using various formats - Each file format has its own tested load from URI sample. Because most of - the code is common for autodetect, append, and truncate, this sample - includes snippets for all supported formats but only calls a single load - job. - This code snippet is made up of shared code, then format-specific code, - followed by more shared code. Note that only the last format in the - format-specific code section will be tested in this test. 
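All of the truncate variants share ``WriteDisposition.WRITE_TRUNCATE``, which
replaces any existing rows in the destination table when the load job
completes.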
- """ - dataset_id = "load_table_from_uri_trunc_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - job_config = bigquery.LoadJobConfig() - job_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - table_ref = dataset.table("us_states") - body = six.BytesIO(b"Washington,WA") - client.load_table_from_file(body, table_ref, job_config=job_config).result() - previous_rows = client.get_table(table_ref).num_rows - assert previous_rows > 0 - - # Shared code - # [START bigquery_load_table_gcs_avro_truncate] - # [START bigquery_load_table_gcs_csv_truncate] - # [START bigquery_load_table_gcs_json_truncate] - # [START bigquery_load_table_gcs_parquet_truncate] - # [START bigquery_load_table_gcs_orc_truncate] - # from google.cloud import bigquery - # client = bigquery.Client() - # table_ref = client.dataset('my_dataset').table('existing_table') - - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE - # [END bigquery_load_table_gcs_avro_truncate] - # [END bigquery_load_table_gcs_csv_truncate] - # [END bigquery_load_table_gcs_json_truncate] - # [END bigquery_load_table_gcs_parquet_truncate] - # [END bigquery_load_table_gcs_orc_truncate] - - # Format-specific code - # [START bigquery_load_table_gcs_avro_truncate] - job_config.source_format = bigquery.SourceFormat.AVRO - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.avro" - # [END bigquery_load_table_gcs_avro_truncate] - - # [START bigquery_load_table_gcs_csv_truncate] - job_config.skip_leading_rows = 1 - # The source format defaults to CSV, so the line below is optional. - job_config.source_format = bigquery.SourceFormat.CSV - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - # [END bigquery_load_table_gcs_csv_truncate] - # unset csv-specific attribute - del job_config._properties["load"]["skipLeadingRows"] - - # [START bigquery_load_table_gcs_json_truncate] - job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" - # [END bigquery_load_table_gcs_json_truncate] - - # [START bigquery_load_table_gcs_parquet_truncate] - job_config.source_format = bigquery.SourceFormat.PARQUET - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet" - # [END bigquery_load_table_gcs_parquet_truncate] - - # [START bigquery_load_table_gcs_orc_truncate] - job_config.source_format = bigquery.SourceFormat.ORC - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.orc" - # [END bigquery_load_table_gcs_orc_truncate] - - # Shared code - # [START bigquery_load_table_gcs_avro_truncate] - # [START bigquery_load_table_gcs_csv_truncate] - # [START bigquery_load_table_gcs_json_truncate] - # [START bigquery_load_table_gcs_parquet_truncate] - # [START bigquery_load_table_gcs_orc_truncate] - load_job = client.load_table_from_uri( - uri, table_ref, job_config=job_config - ) # API request - print("Starting job {}".format(load_job.job_id)) - - load_job.result() # Waits for table load to complete. 
- print("Job finished.") - - destination_table = client.get_table(table_ref) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_avro_truncate] - # [END bigquery_load_table_gcs_csv_truncate] - # [END bigquery_load_table_gcs_json_truncate] - # [END bigquery_load_table_gcs_parquet_truncate] - # [END bigquery_load_table_gcs_orc_truncate] - - out, _ = capsys.readouterr() - assert "Loaded 50 rows." in out - - -def test_load_table_add_column(client, to_delete): - dataset_id = "load_table_add_column_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - snippets_dir = os.path.abspath(os.path.dirname(__file__)) - filepath = os.path.join( - snippets_dir, "..", "..", "bigquery", "tests", "data", "people.csv" - ) - table_ref = dataset_ref.table("my_table") - old_schema = [bigquery.SchemaField("full_name", "STRING", mode="REQUIRED")] - table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) - - # [START bigquery_add_column_load_append] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - # filepath = 'path/to/your_file.csv' - - # Retrieves the destination table and checks the length of the schema - table_id = "my_table" - table_ref = dataset_ref.table(table_id) - table = client.get_table(table_ref) - print("Table {} contains {} columns.".format(table_id, len(table.schema))) - - # Configures the load job to append the data to the destination table, - # allowing field addition - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND - job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION - ] - # In this example, the existing table contains only the 'full_name' column. - # 'REQUIRED' fields cannot be added to an existing schema, so the - # additional column must be 'NULLABLE'. - job_config.schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), - ] - job_config.source_format = bigquery.SourceFormat.CSV - job_config.skip_leading_rows = 1 - - with open(filepath, "rb") as source_file: - job = client.load_table_from_file( - source_file, - table_ref, - location="US", # Must match the destination dataset location. - job_config=job_config, - ) # API request - - job.result() # Waits for table load to complete. 
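    # (With ALLOW_FIELD_ADDITION the load job may widen the schema; any rows
    # already in the table read back NULL in the newly added nullable column.)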
- print( - "Loaded {} rows into {}:{}.".format( - job.output_rows, dataset_id, table_ref.table_id - ) - ) - - # Checks the updated length of the schema - table = client.get_table(table) - print("Table {} now contains {} columns.".format(table_id, len(table.schema))) - # [END bigquery_add_column_load_append] - assert len(table.schema) == 2 - assert table.num_rows > 0 - - -def test_load_table_relax_column(client, to_delete): - dataset_id = "load_table_relax_column_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - snippets_dir = os.path.abspath(os.path.dirname(__file__)) - filepath = os.path.join( - snippets_dir, "..", "..", "bigquery", "tests", "data", "people.csv" - ) - table_ref = dataset_ref.table("my_table") - old_schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("favorite_color", "STRING", mode="REQUIRED"), - ] - table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) - - # [START bigquery_relax_column_load_append] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - # filepath = 'path/to/your_file.csv' - - # Retrieves the destination table and checks the number of required fields - table_id = "my_table" - table_ref = dataset_ref.table(table_id) - table = client.get_table(table_ref) - original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - # In this example, the existing table has 3 required fields. - print("{} fields in the schema are required.".format(original_required_fields)) - - # Configures the load job to append the data to a destination table, - # allowing field relaxation - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND - job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION - ] - # In this example, the existing table contains three required fields - # ('full_name', 'age', and 'favorite_color'), while the data to load - # contains only the first two fields. - job_config.schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - job_config.source_format = bigquery.SourceFormat.CSV - job_config.skip_leading_rows = 1 - - with open(filepath, "rb") as source_file: - job = client.load_table_from_file( - source_file, - table_ref, - location="US", # Must match the destination dataset location. - job_config=job_config, - ) # API request - - job.result() # Waits for table load to complete. 
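    # (ALLOW_FIELD_RELAXATION only loosens column modes from REQUIRED to
    # NULLABLE; BigQuery does not allow tightening a column back to
    # REQUIRED afterwards.)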
- print( - "Loaded {} rows into {}:{}.".format( - job.output_rows, dataset_id, table_ref.table_id - ) - ) - - # Checks the updated number of required fields - table = client.get_table(table) - current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - print("{} fields in the schema are now required.".format(current_required_fields)) - # [END bigquery_relax_column_load_append] - assert original_required_fields - current_required_fields == 1 - assert len(table.schema) == 3 - assert table.schema[2].mode == "NULLABLE" - assert table.num_rows > 0 - - -def test_extract_table(client, to_delete): - bucket_name = "extract_shakespeare_{}".format(_millis()) - storage_client = storage.Client() - bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) - to_delete.append(bucket) - - # [START bigquery_extract_table] - # from google.cloud import bigquery - # client = bigquery.Client() - # bucket_name = 'my-bucket' - project = "bigquery-public-data" - dataset_id = "samples" - table_id = "shakespeare" - - destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.csv") - dataset_ref = client.dataset(dataset_id, project=project) - table_ref = dataset_ref.table(table_id) - - extract_job = client.extract_table( - table_ref, - destination_uri, - # Location must match that of the source table. - location="US", - ) # API request - extract_job.result() # Waits for job to complete. - - print( - "Exported {}:{}.{} to {}".format(project, dataset_id, table_id, destination_uri) - ) - # [END bigquery_extract_table] - - blob = retry_storage_errors(bucket.get_blob)("shakespeare.csv") - assert blob.exists() - assert blob.size > 0 - to_delete.insert(0, blob) - - -def test_extract_table_json(client, to_delete): - bucket_name = "extract_shakespeare_json_{}".format(_millis()) - storage_client = storage.Client() - bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) - to_delete.append(bucket) - - # [START bigquery_extract_table_json] - # from google.cloud import bigquery - # client = bigquery.Client() - # bucket_name = 'my-bucket' - - destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.json") - dataset_ref = client.dataset("samples", project="bigquery-public-data") - table_ref = dataset_ref.table("shakespeare") - job_config = bigquery.job.ExtractJobConfig() - job_config.destination_format = bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON - - extract_job = client.extract_table( - table_ref, - destination_uri, - job_config=job_config, - # Location must match that of the source table. - location="US", - ) # API request - extract_job.result() # Waits for job to complete.
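    # (NEWLINE_DELIMITED_JSON writes one JSON object per line, so the
    # exported file can be reloaded with the matching source_format.)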
- # [END bigquery_extract_table_json] - - blob = retry_storage_errors(bucket.get_blob)("shakespeare.json") - assert blob.exists() - assert blob.size > 0 - to_delete.insert(0, blob) - - -def test_extract_table_compressed(client, to_delete): - bucket_name = "extract_shakespeare_compress_{}".format(_millis()) - storage_client = storage.Client() - bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) - to_delete.append(bucket) - - # [START bigquery_extract_table_compressed] - # from google.cloud import bigquery - # client = bigquery.Client() - # bucket_name = 'my-bucket' - - destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.csv.gz") - dataset_ref = client.dataset("samples", project="bigquery-public-data") - table_ref = dataset_ref.table("shakespeare") - job_config = bigquery.job.ExtractJobConfig() - job_config.compression = bigquery.Compression.GZIP - - extract_job = client.extract_table( - table_ref, - destination_uri, - # Location must match that of the source table. - location="US", - job_config=job_config, - ) # API request - extract_job.result() # Waits for job to complete. - # [END bigquery_extract_table_compressed] - - blob = retry_storage_errors(bucket.get_blob)("shakespeare.csv.gz") - assert blob.exists() - assert blob.size > 0 - to_delete.insert(0, blob) - - -def test_client_query_total_rows(client, capsys): - """Run a query and check only how many rows it returns.""" - # [START bigquery_query_total_rows] - # from google.cloud import bigquery - # client = bigquery.Client() - - query = ( - "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " - 'WHERE state = "TX" ' - "LIMIT 100" - ) - query_job = client.query( - query, - # Location must match that of the dataset(s) referenced in the query. - location="US", - ) # API request - starts the query - - results = query_job.result() # Wait for query to complete. - print("Got {} rows.".format(results.total_rows)) - # [END bigquery_query_total_rows] - - out, _ = capsys.readouterr() - assert "Got 100 rows." in out - - -def test_manage_job(client): - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - location = "us" - job = client.query(sql, location=location) - job_id = job.job_id - - # [START bigquery_cancel_job] - # TODO(developer): Uncomment the lines below and replace with your values. - # from google.cloud import bigquery - # client = bigquery.Client() - # job_id = 'bq-job-123x456-123y123z123c' # replace with your job ID - # location = 'us' # replace with your location - - job = client.cancel_job(job_id, location=location) - # [END bigquery_cancel_job] - - # [START bigquery_get_job] - # TODO(developer): Uncomment the lines below and replace with your values.
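    # (Jobs are identified by project, location, and job ID; the location
    # argument matters because BigQuery stores job metadata regionally.)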
- # from google.cloud import bigquery - # client = bigquery.Client() - # job_id = 'bq-job-123x456-123y123z123c' # replace with your job ID - # location = 'us' # replace with your location - - job = client.get_job(job_id, location=location) # API request - - # Print selected job properties - print("Details for job {} running in {}:".format(job_id, location)) - print( - "\tType: {}\n\tState: {}\n\tCreated: {}".format( - job.job_type, job.state, job.created - ) - ) - # [END bigquery_get_job] - - -def test_query_external_gcs_permanent_table(client, to_delete): - dataset_id = "query_external_gcs_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_query_external_gcs_perm] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' - - # Configure the external data source - dataset_ref = client.dataset(dataset_id) - table_id = "us_states" - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - table = bigquery.Table(dataset_ref.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig("CSV") - external_config.source_uris = [ - "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - ] - external_config.options.skip_leading_rows = 1 # optionally skip header row - table.external_data_configuration = external_config - - # Create a permanent table linked to the GCS file - table = client.create_table(table) # API request - - # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) - - query_job = client.query(sql) # API request - - w_states = list(query_job) # Waits for query to finish - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_gcs_perm] - assert len(w_states) == 4 - - -def test_ddl_create_view(client, to_delete, capsys): - """Create a view via a DDL query.""" - project = client.project - dataset_id = "ddl_view_{}".format(_millis()) - table_id = "new_view" - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_ddl_create_view] - # from google.cloud import bigquery - # project = 'my-project' - # dataset_id = 'my_dataset' - # table_id = 'new_view' - # client = bigquery.Client(project=project) - - sql = """ - CREATE VIEW `{}.{}.{}` - OPTIONS( - expiration_timestamp=TIMESTAMP_ADD( - CURRENT_TIMESTAMP(), INTERVAL 48 HOUR), - friendly_name="new_view", - description="a view that expires in 2 days", - labels=[("org_unit", "development")] - ) - AS SELECT name, state, year, number - FROM `bigquery-public-data.usa_names.usa_1910_current` - WHERE state LIKE 'W%' - """.format( - project, dataset_id, table_id - ) - - job = client.query(sql) # API request. - job.result() # Waits for the query to finish. - - print( - 'Created new view "{}.{}.{}".'.format( - job.destination.project, - job.destination.dataset_id, - job.destination.table_id, - ) - ) - # [END bigquery_ddl_create_view] - - out, _ = capsys.readouterr() - assert 'Created new view "{}.{}.{}".'.format(project, dataset_id, table_id) in out - - # Test that listing query result rows succeeds so that generic query - # processing tools work with DDL statements. 
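    # (The DDL statement itself produces an empty result set; the created
    # view is reachable through job.destination instead.)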
- rows = list(job) - assert len(rows) == 0 - - if pandas is not None: - df = job.to_dataframe() - assert len(df) == 0 - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_query_results_as_dataframe(client): - # [START bigquery_query_results_dataframe] - # from google.cloud import bigquery - # client = bigquery.Client() - - sql = """ - SELECT name, SUM(number) as count - FROM `bigquery-public-data.usa_names.usa_1910_current` - GROUP BY name - ORDER BY count DESC - LIMIT 10 - """ - - df = client.query(sql).to_dataframe() - # [END bigquery_query_results_dataframe] - assert isinstance(df, pandas.DataFrame) - assert len(list(df)) == 2 # verify the number of columns - assert len(df) == 10 # verify the number of rows - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_list_rows_as_dataframe(client): - # [START bigquery_list_rows_dataframe] - # from google.cloud import bigquery - # client = bigquery.Client() - - dataset_ref = client.dataset("samples", project="bigquery-public-data") - table_ref = dataset_ref.table("shakespeare") - table = client.get_table(table_ref) - - df = client.list_rows(table).to_dataframe() - # [END bigquery_list_rows_dataframe] - assert isinstance(df, pandas.DataFrame) - assert len(list(df)) == len(table.schema) # verify the number of columns - assert len(df) == table.num_rows # verify the number of rows - - -if __name__ == "__main__": - pytest.main() diff --git a/bigquery/docs/usage.html b/bigquery/docs/usage.html deleted file mode 100644 index 78dc14b9ca03..000000000000 --- a/bigquery/docs/usage.html +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - diff --git a/bigquery/docs/usage/client.rst b/bigquery/docs/usage/client.rst deleted file mode 100644 index d631585ea2fe..000000000000 --- a/bigquery/docs/usage/client.rst +++ /dev/null @@ -1,25 +0,0 @@ -Creating a Client -~~~~~~~~~~~~~~~~~ - -A project is the top-level container in the ``BigQuery`` API: it is tied -closely to billing, and can provide default access control across all its -datasets. If no ``project`` is passed to the client constructor, the library -attempts to infer a project using the environment (including explicit -environment variables, GAE, and GCE). - -To override the project inferred from the environment, pass an explicit -``project`` to the :class:`~google.cloud.bigquery.client.Client` constructor, -or to either of the alternative ``classmethod`` factories: - -.. code-block:: python - - from google.cloud import bigquery - client = bigquery.Client(project='PROJECT_ID') - - -Project ACLs -^^^^^^^^^^^^ - -Each project has an access control list granting reader / writer / owner -permission to one or more entities. This list cannot be queried or set -via the API; it must be managed using the Google Developer Console. diff --git a/bigquery/docs/usage/datasets.rst b/bigquery/docs/usage/datasets.rst deleted file mode 100644 index 2daee77f36d2..000000000000 --- a/bigquery/docs/usage/datasets.rst +++ /dev/null @@ -1,131 +0,0 @@ -Managing Datasets -~~~~~~~~~~~~~~~~~ - -A dataset represents a collection of tables, and applies several default -policies to tables as they are created: - -- An access control list (ACL). When created, a dataset has an ACL - which maps to the ACL inherited from its project. - -- A default table expiration period. If set, tables created within the - dataset will have the value as their expiration period. - -See BigQuery documentation for more information on -`Datasets <https://cloud.google.com/bigquery/docs/datasets-intro>`_.
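For example, a default table expiration can be configured before the dataset
is created (an illustrative sketch; ``my_dataset`` is a placeholder ID):

.. code-block:: python

    dataset = bigquery.Dataset(client.dataset('my_dataset'))
    dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000  # 1 day
    dataset = client.create_dataset(dataset)  # API request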
- -Listing Datasets -^^^^^^^^^^^^^^^^ - -List datasets for a project with the -:func:`~google.cloud.bigquery.client.Client.list_datasets` method: - -.. literalinclude:: ../samples/list_datasets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_list_datasets] - :end-before: [END bigquery_list_datasets] - -List datasets by label for a project with the -:func:`~google.cloud.bigquery.client.Client.list_datasets` method: - -.. literalinclude:: ../samples/list_datasets_by_label.py - :language: python - :dedent: 4 - :start-after: [START bigquery_list_datasets_by_label] - :end-before: [END bigquery_list_datasets_by_label] - -Getting a Dataset -^^^^^^^^^^^^^^^^^ - -Get a dataset resource (to pick up changes made by another client) with the -:func:`~google.cloud.bigquery.client.Client.get_dataset` method: - -.. literalinclude:: ../samples/get_dataset.py - :language: python - :dedent: 4 - :start-after: [START bigquery_get_dataset] - :end-before: [END bigquery_get_dataset] - -Determine if a dataset exists with the -:func:`~google.cloud.bigquery.client.Client.get_dataset` method: - -.. literalinclude:: ../samples/dataset_exists.py - :language: python - :dedent: 4 - :start-after: [START bigquery_dataset_exists] - :end-before: [END bigquery_dataset_exists] - -Creating a Dataset -^^^^^^^^^^^^^^^^^^ - -Create a new dataset with the -:func:`~google.cloud.bigquery.client.Client.create_dataset` method: - -.. literalinclude:: ../samples/create_dataset.py - :language: python - :dedent: 4 - :start-after: [START bigquery_create_dataset] - :end-before: [END bigquery_create_dataset] - -Updating a Dataset -^^^^^^^^^^^^^^^^^^ - -Update a property in a dataset's metadata with the -:func:`~google.cloud.bigquery.client.Client.update_dataset` method: - -.. literalinclude:: ../samples/update_dataset_description.py - :language: python - :dedent: 4 - :start-after: [START bigquery_update_dataset_description] - :end-before: [END bigquery_update_dataset_description] - -Modify user permissions on a dataset with the -:func:`~google.cloud.bigquery.client.Client.update_dataset` method: - -.. literalinclude:: ../samples/update_dataset_access.py - :language: python - :dedent: 4 - :start-after: [START bigquery_update_dataset_access] - :end-before: [END bigquery_update_dataset_access] - -Manage Dataset labels -^^^^^^^^^^^^^^^^^^^^^ - -Add labels to a dataset with the -:func:`~google.cloud.bigquery.client.Client.update_dataset` method: - -.. literalinclude:: ../samples/label_dataset.py - :language: python - :dedent: 4 - :start-after: [START bigquery_label_dataset] - :end-before: [END bigquery_label_dataset] - -Get dataset's labels with the -:func:`~google.cloud.bigquery.client.Client.get_dataset` method: - -.. literalinclude:: ../samples/get_dataset_labels.py - :language: python - :dedent: 4 - :start-after: [START bigquery_get_dataset_labels] - :end-before: [END bigquery_get_dataset_labels] - -Delete dataset's labels with the -:func:`~google.cloud.bigquery.client.Client.update_dataset` method: - -.. literalinclude:: ../samples/delete_dataset_labels.py - :language: python - :dedent: 4 - :start-after: [START bigquery_delete_label_dataset] - :end-before: [END bigquery_delete_label_dataset] - -Deleting a Dataset -^^^^^^^^^^^^^^^^^^ - -Delete a dataset with the -:func:`~google.cloud.bigquery.client.Client.delete_dataset` method: - -.. 
literalinclude:: ../samples/delete_dataset.py - :language: python - :dedent: 4 - :start-after: [START bigquery_delete_dataset] - :end-before: [END bigquery_delete_dataset] diff --git a/bigquery/docs/usage/encryption.rst b/bigquery/docs/usage/encryption.rst deleted file mode 100644 index 6652f05658c6..000000000000 --- a/bigquery/docs/usage/encryption.rst +++ /dev/null @@ -1,52 +0,0 @@ -Using Customer Managed Encryption Keys -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Table data is always encrypted at rest, but BigQuery also provides a way for -you to control what keys it uses to encrypt the data. See `Protecting data -with Cloud KMS keys -<https://cloud.google.com/bigquery/docs/customer-managed-encryption>`_ -in the BigQuery documentation for more details. - -Create a new table, using a customer-managed encryption key from -Cloud KMS to encrypt it. - -.. literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_create_table_cmek] - :end-before: [END bigquery_create_table_cmek] - -Change the key used to encrypt a table. - -.. literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_update_table_cmek] - :end-before: [END bigquery_update_table_cmek] - -Load a file from Cloud Storage, using a customer-managed encryption key from -Cloud KMS for the destination table. - -.. literalinclude:: ../samples/load_table_uri_cmek.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_json_cmek] - :end-before: [END bigquery_load_table_gcs_json_cmek] - -Copy a table, using a customer-managed encryption key from Cloud KMS for the -destination table. - -.. literalinclude:: ../samples/copy_table_cmek.py - :language: python - :dedent: 4 - :start-after: [START bigquery_copy_table_cmek] - :end-before: [END bigquery_copy_table_cmek] - -Write query results to a table, using a customer-managed encryption key from -Cloud KMS for the destination table. - -.. literalinclude:: ../samples/client_query_destination_table_cmek.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_destination_table_cmek] - :end-before: [END bigquery_query_destination_table_cmek] diff --git a/bigquery/docs/usage/index.rst b/bigquery/docs/usage/index.rst deleted file mode 100644 index ff4c9d7f1a8f..000000000000 --- a/bigquery/docs/usage/index.rst +++ /dev/null @@ -1,35 +0,0 @@ -Usage Guides -~~~~~~~~~~~~ - -BigQuery Basics -^^^^^^^^^^^^^^^ - -.. toctree:: - :maxdepth: 1 - - client - queries - -Working with BigQuery Resources -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. toctree:: - :maxdepth: 1 - - datasets - tables - encryption - jobs - -Integrations with Other Libraries -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. toctree:: - :maxdepth: 1 - - pandas - -See also the :mod:`google.cloud.bigquery.magics` module for integrations -with Jupyter. - - diff --git a/bigquery/docs/usage/jobs.rst b/bigquery/docs/usage/jobs.rst deleted file mode 100644 index c3dd71031bfc..000000000000 --- a/bigquery/docs/usage/jobs.rst +++ /dev/null @@ -1,21 +0,0 @@ -Managing Jobs -~~~~~~~~~~~~~ - -Jobs describe actions performed on data in BigQuery tables: - -- Load data into a table -- Run a query against data in one or more tables -- Extract data from a table -- Copy a table - -Listing jobs -^^^^^^^^^^^^ - -List jobs for a project with the -:func:`~google.cloud.bigquery.client.Client.list_jobs` method: - - ..
literalinclude:: ../samples/client_list_jobs.py - :language: python - :dedent: 4 - :start-after: [START bigquery_list_jobs] - :end-before: [END bigquery_list_jobs] diff --git a/bigquery/docs/usage/pandas.rst b/bigquery/docs/usage/pandas.rst deleted file mode 100644 index 9db98dfbbccb..000000000000 --- a/bigquery/docs/usage/pandas.rst +++ /dev/null @@ -1,62 +0,0 @@ -Using BigQuery with Pandas -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Retrieve BigQuery data as a Pandas DataFrame -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -As of version 0.29.0, you can use the -:func:`~google.cloud.bigquery.table.RowIterator.to_dataframe` function to -retrieve query results or table rows as a :class:`pandas.DataFrame`. - -First, ensure that the :mod:`pandas` library is installed by running: - -.. code-block:: bash - - pip install --upgrade pandas - -Alternatively, you can install the BigQuery python client library with -:mod:`pandas` by running: - -.. code-block:: bash - - pip install --upgrade google-cloud-bigquery[pandas] - -To retrieve query results as a :class:`pandas.DataFrame`: - -.. literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_results_dataframe] - :end-before: [END bigquery_query_results_dataframe] - -To retrieve table rows as a :class:`pandas.DataFrame`: - -.. literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_list_rows_dataframe] - :end-before: [END bigquery_list_rows_dataframe] - -Load a Pandas DataFrame to a BigQuery Table -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -As of version 1.3.0, you can use the -:func:`~google.cloud.bigquery.client.Client.load_table_from_dataframe` function -to load data from a :class:`pandas.DataFrame` to a -:class:`~google.cloud.bigquery.table.Table`. To use this function, in addition -to :mod:`pandas`, you will need to install the :mod:`pyarrow` library. You can -install the BigQuery python client library with :mod:`pandas` and -:mod:`pyarrow` by running: - -.. code-block:: bash - - pip install --upgrade google-cloud-bigquery[pandas,pyarrow] - -The following example demonstrates how to create a :class:`pandas.DataFrame` -and load it into a new table: - -.. literalinclude:: ../samples/load_table_dataframe.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_dataframe] - :end-before: [END bigquery_load_table_dataframe] diff --git a/bigquery/docs/usage/queries.rst b/bigquery/docs/usage/queries.rst deleted file mode 100644 index fc57e54de9df..000000000000 --- a/bigquery/docs/usage/queries.rst +++ /dev/null @@ -1,63 +0,0 @@ -Running Queries -~~~~~~~~~~~~~~~ - -Querying data -^^^^^^^^^^^^^ - -Run a query and wait for it to finish with the -:func:`~google.cloud.bigquery.client.Client.query` method: - -.. literalinclude:: ../samples/client_query.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query] - :end-before: [END bigquery_query] - - -Run a dry run query -^^^^^^^^^^^^^^^^^^^ - -.. literalinclude:: ../samples/client_query_dry_run.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_dry_run] - :end-before: [END bigquery_query_dry_run] - - -Writing query results to a destination table -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -See BigQuery documentation for more information on -`writing query results `_. - -.. 
literalinclude:: ../samples/client_query_destination_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_destination_table] - :end-before: [END bigquery_query_destination_table] - - -Run a query using a named query parameter -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -See BigQuery documentation for more information on -`parameterized queries `_. - -.. literalinclude:: ../samples/client_query_w_named_params.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_params_named] - :end-before: [END bigquery_query_params_named] - -Run a script -^^^^^^^^^^^^ - -See BigQuery documentation for more information on `scripting in BigQuery -standard SQL -`_. - -.. literalinclude:: ../samples/query_script.py - :language: python - :dedent: 4 - :start-after: [START bigquery_query_script] - :end-before: [END bigquery_query_script] diff --git a/bigquery/docs/usage/tables.rst b/bigquery/docs/usage/tables.rst deleted file mode 100644 index 45145cd19004..000000000000 --- a/bigquery/docs/usage/tables.rst +++ /dev/null @@ -1,222 +0,0 @@ -Managing Tables -~~~~~~~~~~~~~~~ - -Tables exist within datasets. See BigQuery documentation for more information -on `Tables `_. - -Listing Tables -^^^^^^^^^^^^^^ - -List the tables belonging to a dataset with the -:func:`~google.cloud.bigquery.client.Client.list_tables` method: - -.. literalinclude:: ../samples/list_tables.py - :language: python - :dedent: 4 - :start-after: [START bigquery_list_tables] - :end-before: [END bigquery_list_tables] - -Getting a Table -^^^^^^^^^^^^^^^ - -Get a table resource with the -:func:`~google.cloud.bigquery.client.Client.get_table` method: - -.. literalinclude:: ../samples/get_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_get_table] - :end-before: [END bigquery_get_table] - -Determine if a table exists with the -:func:`~google.cloud.bigquery.client.Client.get_table` method: - -.. literalinclude:: ../samples/table_exists.py - :language: python - :dedent: 4 - :start-after: [START bigquery_table_exists] - :end-before: [END bigquery_table_exists] - -Browse data rows in a table with the -:func:`~google.cloud.bigquery.client.Client.list_rows` method: - -.. literalinclude:: ../samples/browse_table_data.py - :language: python - :dedent: 4 - :start-after: [START bigquery_browse_table] - :end-before: [END bigquery_browse_table] - -Creating a Table -^^^^^^^^^^^^^^^^ - -Create an empty table with the -:func:`~google.cloud.bigquery.client.Client.create_table` method: - -.. literalinclude:: ../samples/create_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_create_table] - :end-before: [END bigquery_create_table] - -Create an integer range partitioned table with the -:func:`~google.cloud.bigquery.client.Client.create_table` method: - -.. literalinclude:: ../samples/create_table_range_partitioned.py - :language: python - :dedent: 4 - :start-after: [START bigquery_create_table_range_partitioned] - :end-before: [END bigquery_create_table_range_partitioned] - -Load table data from a file with the -:func:`~google.cloud.bigquery.client.Client.load_table_from_file` method: - -.. literalinclude:: ../samples/load_table_file.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_from_file] - :end-before: [END bigquery_load_from_file] - -Load a CSV file from Cloud Storage with the -:func:`~google.cloud.bigquery.client.Client.load_table_from_uri` method: - -.. 
literalinclude:: ../samples/load_table_uri_csv.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_csv] - :end-before: [END bigquery_load_table_gcs_csv] - -See also: `Loading CSV data from Cloud Storage -`_. - -Load a JSON file from Cloud Storage: - -.. literalinclude:: ../samples/load_table_uri_json.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_json] - :end-before: [END bigquery_load_table_gcs_json] - -See also: `Loading JSON data from Cloud Storage -`_. - -Load a Parquet file from Cloud Storage: - -.. literalinclude:: ../samples/load_table_uri_parquet.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_parquet] - :end-before: [END bigquery_load_table_gcs_parquet] - -See also: `Loading Parquet data from Cloud Storage -`_. - -Load an Avro file from Cloud Storage: - -.. literalinclude:: ../samples/load_table_uri_avro.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_avro] - :end-before: [END bigquery_load_table_gcs_avro] - -See also: `Loading Avro data from Cloud Storage -`_. - -Load an ORC file from Cloud Storage: - -.. literalinclude:: ../samples/load_table_uri_orc.py - :language: python - :dedent: 4 - :start-after: [START bigquery_load_table_gcs_orc] - :end-before: [END bigquery_load_table_gcs_orc] - -See also: `Loading ORC data from Cloud Storage -`_. - -Updating a Table -^^^^^^^^^^^^^^^^ - -Update a property in a table's metadata with the -:func:`~google.cloud.bigquery.client.Client.update_table` method: - -.. literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_update_table_description] - :end-before: [END bigquery_update_table_description] - -Insert rows into a table's data with the -:func:`~google.cloud.bigquery.client.Client.insert_rows` method: - -.. literalinclude:: ../samples/table_insert_rows.py - :language: python - :dedent: 4 - :start-after: [START bigquery_table_insert_rows] - :end-before: [END bigquery_table_insert_rows] - -Insert rows into a table's data with the -:func:`~google.cloud.bigquery.client.Client.insert_rows` method, achieving -higher write limit: - -.. literalinclude:: ../samples/table_insert_rows_explicit_none_insert_ids.py - :language: python - :dedent: 4 - :start-after: [START bigquery_table_insert_rows_explicit_none_insert_ids] - :end-before: [END bigquery_table_insert_rows_explicit_none_insert_ids] - -Mind that inserting data with ``None`` row insert IDs can come at the expense of -more duplicate inserts. See also: -`Streaming inserts `_. - -Add an empty column to the existing table with the -:func:`~google.cloud.bigquery.update_table` method: - -.. literalinclude:: ../samples/add_empty_column.py - :language: python - :dedent: 4 - :start-after: [START bigquery_add_empty_column] - :end-before: [END bigquery_add_empty_column] - -Copying a Table -^^^^^^^^^^^^^^^ - -Copy a table with the -:func:`~google.cloud.bigquery.client.Client.copy_table` method: - -.. literalinclude:: ../samples/copy_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_copy_table] - :end-before: [END bigquery_copy_table] - -Copy table data to Google Cloud Storage with the -:func:`~google.cloud.bigquery.client.Client.extract_table` method: - -.. 
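A minimal sketch of the extract flow described above (the bucket name is a placeholder the caller must be able to write to; the source is a public sample table):

.. code-block:: python

    from google.cloud import bigquery

    client = bigquery.Client()
    destination_uri = "gs://your-bucket/shakespeare-*.csv"  # placeholder bucket
    extract_job = client.extract_table(
        "bigquery-public-data.samples.shakespeare",
        destination_uri,
        location="US",  # Must match the source table's location.
    )
    extract_job.result()  # Wait for the extract job to finish.
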
literalinclude:: ../snippets.py - :language: python - :dedent: 4 - :start-after: [START bigquery_extract_table] - :end-before: [END bigquery_extract_table] - -Deleting a Table -^^^^^^^^^^^^^^^^ - -Delete a table with the -:func:`~google.cloud.bigquery.client.Client.delete_table` method: - -.. literalinclude:: ../samples/delete_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_delete_table] - :end-before: [END bigquery_delete_table] - -Restoring a Deleted Table -^^^^^^^^^^^^^^^^^^^^^^^^^ - -Restore a deleted table from a snapshot by using the -:func:`~google.cloud.bigquery.client.Client.copy_table` method: - -.. literalinclude:: ../samples/undelete_table.py - :language: python - :dedent: 4 - :start-after: [START bigquery_undelete_table] - :end-before: [END bigquery_undelete_table] diff --git a/bigquery/google/__init__.py b/bigquery/google/__init__.py deleted file mode 100644 index 8fcc60e2b9c6..000000000000 --- a/bigquery/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery/google/cloud/__init__.py b/bigquery/google/cloud/__init__.py deleted file mode 100644 index 8fcc60e2b9c6..000000000000 --- a/bigquery/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery/google/cloud/bigquery/__init__.py b/bigquery/google/cloud/bigquery/__init__.py deleted file mode 100644 index 3982c1175850..000000000000 --- a/bigquery/google/cloud/bigquery/__init__.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google BigQuery API wrapper. - -The main concepts with this API are: - -- :class:`~google.cloud.bigquery.client.Client` manages connections to the - BigQuery API. Use the client methods to run jobs (such as a - :class:`~google.cloud.bigquery.job.QueryJob` via - :meth:`~google.cloud.bigquery.client.Client.query`) and manage resources. - -- :class:`~google.cloud.bigquery.dataset.Dataset` represents a - collection of tables. - -- :class:`~google.cloud.bigquery.table.Table` represents a single "relation". -""" - - -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-bigquery").version - -from google.cloud.bigquery.client import Client -from google.cloud.bigquery.dataset import AccessEntry -from google.cloud.bigquery.dataset import Dataset -from google.cloud.bigquery.dataset import DatasetReference -from google.cloud.bigquery import enums -from google.cloud.bigquery.enums import StandardSqlDataTypes -from google.cloud.bigquery.external_config import ExternalConfig -from google.cloud.bigquery.external_config import BigtableOptions -from google.cloud.bigquery.external_config import BigtableColumnFamily -from google.cloud.bigquery.external_config import BigtableColumn -from google.cloud.bigquery.external_config import CSVOptions -from google.cloud.bigquery.external_config import GoogleSheetsOptions -from google.cloud.bigquery.external_config import ExternalSourceFormat -from google.cloud.bigquery.job import Compression -from google.cloud.bigquery.job import CopyJob -from google.cloud.bigquery.job import CopyJobConfig -from google.cloud.bigquery.job import CreateDisposition -from google.cloud.bigquery.job import DestinationFormat -from google.cloud.bigquery.job import Encoding -from google.cloud.bigquery.job import ExtractJob -from google.cloud.bigquery.job import ExtractJobConfig -from google.cloud.bigquery.job import LoadJob -from google.cloud.bigquery.job import LoadJobConfig -from google.cloud.bigquery.job import QueryJob -from google.cloud.bigquery.job import QueryJobConfig -from google.cloud.bigquery.job import QueryPriority -from google.cloud.bigquery.job import SchemaUpdateOption -from google.cloud.bigquery.job import SourceFormat -from google.cloud.bigquery.job import UnknownJob -from google.cloud.bigquery.job import WriteDisposition -from google.cloud.bigquery.model import Model -from google.cloud.bigquery.model import ModelReference -from google.cloud.bigquery.query import ArrayQueryParameter -from google.cloud.bigquery.query import ScalarQueryParameter -from google.cloud.bigquery.query import StructQueryParameter -from google.cloud.bigquery.query import UDFResource -from google.cloud.bigquery.retry import DEFAULT_RETRY -from google.cloud.bigquery.routine import Routine -from google.cloud.bigquery.routine import RoutineArgument -from google.cloud.bigquery.routine import RoutineReference -from google.cloud.bigquery.schema import SchemaField -from google.cloud.bigquery.table import PartitionRange -from google.cloud.bigquery.table import RangePartitioning -from google.cloud.bigquery.table import Row -from google.cloud.bigquery.table import Table -from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.table import TimePartitioningType -from google.cloud.bigquery.table import TimePartitioning -from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration - -__all__ = [ - "__version__", - "Client", 
- # Queries - "QueryJob", - "QueryJobConfig", - "ArrayQueryParameter", - "ScalarQueryParameter", - "StructQueryParameter", - # Datasets - "Dataset", - "DatasetReference", - "AccessEntry", - # Tables - "Table", - "TableReference", - "PartitionRange", - "RangePartitioning", - "Row", - "TimePartitioning", - "TimePartitioningType", - # Jobs - "CopyJob", - "CopyJobConfig", - "ExtractJob", - "ExtractJobConfig", - "LoadJob", - "LoadJobConfig", - "UnknownJob", - # Models - "Model", - "ModelReference", - # Routines - "Routine", - "RoutineArgument", - "RoutineReference", - # Shared helpers - "SchemaField", - "UDFResource", - "ExternalConfig", - "BigtableOptions", - "BigtableColumnFamily", - "BigtableColumn", - "CSVOptions", - "GoogleSheetsOptions", - "DEFAULT_RETRY", - # Enum Constants - "enums", - "Compression", - "CreateDisposition", - "DestinationFormat", - "ExternalSourceFormat", - "Encoding", - "QueryPriority", - "SchemaUpdateOption", - "StandardSqlDataTypes", - "SourceFormat", - "WriteDisposition", - # EncryptionConfiguration - "EncryptionConfiguration", -] - - -def load_ipython_extension(ipython): - """Called by IPython when this module is loaded as an IPython extension.""" - from google.cloud.bigquery.magics import _cell_magic - - ipython.register_magic_function( - _cell_magic, magic_kind="cell", magic_name="bigquery" - ) diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py deleted file mode 100644 index 21a8e3636d24..000000000000 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ /dev/null @@ -1,686 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
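For context on the ``load_ipython_extension`` hook deleted above: in a Jupyter/IPython session the extension is loaded with the standard IPython mechanism, after which the ``%%bigquery`` cell magic becomes available (a sketch; the query and the target variable name are illustrative):

.. code-block:: python

    # In IPython / Jupyter:
    %load_ext google.cloud.bigquery

    # Then, in a later cell, save results to a DataFrame named `df`:
    %%bigquery df
    SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 5
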
- -"""Shared helper functions for BigQuery API classes.""" - -import base64 -import copy -import datetime -import decimal -import re - -from google.cloud._helpers import UTC -from google.cloud._helpers import _date_from_iso8601_date -from google.cloud._helpers import _datetime_from_microseconds -from google.cloud._helpers import _microseconds_from_datetime -from google.cloud._helpers import _RFC3339_NO_FRACTION -from google.cloud._helpers import _to_bytes - -_RFC3339_MICROS_NO_ZULU = "%Y-%m-%dT%H:%M:%S.%f" -_TIMEONLY_WO_MICROS = "%H:%M:%S" -_TIMEONLY_W_MICROS = "%H:%M:%S.%f" -_PROJECT_PREFIX_PATTERN = re.compile( - r""" - (?P\S+\:[^.]+)\.(?P[^.]+)(?:$|\.(?P[^.]+)$) -""", - re.VERBOSE, -) - - -def _not_null(value, field): - """Check whether 'value' should be coerced to 'field' type.""" - return value is not None or field.mode != "NULLABLE" - - -def _int_from_json(value, field): - """Coerce 'value' to an int, if set or not nullable.""" - if _not_null(value, field): - return int(value) - - -def _float_from_json(value, field): - """Coerce 'value' to a float, if set or not nullable.""" - if _not_null(value, field): - return float(value) - - -def _decimal_from_json(value, field): - """Coerce 'value' to a Decimal, if set or not nullable.""" - if _not_null(value, field): - return decimal.Decimal(value) - - -def _bool_from_json(value, field): - """Coerce 'value' to a bool, if set or not nullable.""" - if _not_null(value, field): - return value.lower() in ["t", "true", "1"] - - -def _string_from_json(value, _): - """NOOP string -> string coercion""" - return value - - -def _bytes_from_json(value, field): - """Base64-decode value""" - if _not_null(value, field): - return base64.standard_b64decode(_to_bytes(value)) - - -def _timestamp_from_json(value, field): - """Coerce 'value' to a datetime, if set or not nullable.""" - if _not_null(value, field): - # value will be a float in seconds, to microsecond precision, in UTC. - return _datetime_from_microseconds(1e6 * float(value)) - - -def _timestamp_query_param_from_json(value, field): - """Coerce 'value' to a datetime, if set or not nullable. - - Args: - value (str): The timestamp. - - field (google.cloud.bigquery.schema.SchemaField): - The field corresponding to the value. - - Returns: - Optional[datetime.datetime]: - The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). - """ - if _not_null(value, field): - # Canonical formats for timestamps in BigQuery are flexible. See: - # g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type - # The separator between the date and time can be 'T' or ' '. - value = value.replace(" ", "T", 1) - # The UTC timezone may be formatted as Z or +00:00. - value = value.replace("Z", "") - value = value.replace("+00:00", "") - - if "." in value: - # YYYY-MM-DDTHH:MM:SS.ffffff - return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU).replace( - tzinfo=UTC - ) - else: - # YYYY-MM-DDTHH:MM:SS - return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION).replace( - tzinfo=UTC - ) - else: - return None - - -def _datetime_from_json(value, field): - """Coerce 'value' to a datetime, if set or not nullable. - - Args: - value (str): The timestamp. - field (google.cloud.bigquery.schema.SchemaField): - The field corresponding to the value. - - Returns: - Optional[datetime.datetime]: - The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). - """ - if _not_null(value, field): - if "." 
in value: - # YYYY-MM-DDTHH:MM:SS.ffffff - return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU) - else: - # YYYY-MM-DDTHH:MM:SS - return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION) - else: - return None - - -def _date_from_json(value, field): - """Coerce 'value' to a datetime date, if set or not nullable""" - if _not_null(value, field): - # value will be a string, in YYYY-MM-DD form. - return _date_from_iso8601_date(value) - - -def _time_from_json(value, field): - """Coerce 'value' to a datetime date, if set or not nullable""" - if _not_null(value, field): - if len(value) == 8: # HH:MM:SS - fmt = _TIMEONLY_WO_MICROS - elif len(value) == 15: # HH:MM:SS.micros - fmt = _TIMEONLY_W_MICROS - else: - raise ValueError("Unknown time format: {}".format(value)) - return datetime.datetime.strptime(value, fmt).time() - - -def _record_from_json(value, field): - """Coerce 'value' to a mapping, if set or not nullable.""" - if _not_null(value, field): - record = {} - record_iter = zip(field.fields, value["f"]) - for subfield, cell in record_iter: - converter = _CELLDATA_FROM_JSON[subfield.field_type] - if subfield.mode == "REPEATED": - value = [converter(item["v"], subfield) for item in cell["v"]] - else: - value = converter(cell["v"], subfield) - record[subfield.name] = value - return record - - -_CELLDATA_FROM_JSON = { - "INTEGER": _int_from_json, - "INT64": _int_from_json, - "FLOAT": _float_from_json, - "FLOAT64": _float_from_json, - "NUMERIC": _decimal_from_json, - "BOOLEAN": _bool_from_json, - "BOOL": _bool_from_json, - "STRING": _string_from_json, - "GEOGRAPHY": _string_from_json, - "BYTES": _bytes_from_json, - "TIMESTAMP": _timestamp_from_json, - "DATETIME": _datetime_from_json, - "DATE": _date_from_json, - "TIME": _time_from_json, - "RECORD": _record_from_json, -} - -_QUERY_PARAMS_FROM_JSON = dict(_CELLDATA_FROM_JSON) -_QUERY_PARAMS_FROM_JSON["TIMESTAMP"] = _timestamp_query_param_from_json - - -def _field_to_index_mapping(schema): - """Create a mapping from schema field name to index of field.""" - return {f.name: i for i, f in enumerate(schema)} - - -def _field_from_json(resource, field): - converter = _CELLDATA_FROM_JSON.get(field.field_type, lambda value, _: value) - if field.mode == "REPEATED": - return [converter(item["v"], field) for item in resource] - else: - return converter(resource, field) - - -def _row_tuple_from_json(row, schema): - """Convert JSON row data to row with appropriate types. - - Note: ``row['f']`` and ``schema`` are presumed to be of the same length. - - Args: - row (Dict): A JSON response row to be converted. - schema (Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): Specification of the field types in ``row``. - - Returns: - Tuple: A tuple of data converted to native types. - """ - from google.cloud.bigquery.schema import _to_schema_fields - - schema = _to_schema_fields(schema) - - row_data = [] - for field, cell in zip(schema, row["f"]): - row_data.append(_field_from_json(cell["v"], field)) - return tuple(row_data) - - -def _rows_from_json(values, schema): - """Convert JSON row data to rows with appropriate types. - - Args: - values (Sequence[Dict]): The list of responses (JSON rows) to convert. - schema (Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): - The table's schema. If any item is a mapping, its content must be - compatible with - :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. 
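To make the converter plumbing above concrete, a small sketch of how a ``tabledata.list``-style cell payload is decoded (these are private helpers and may change; the payload shape is illustrative):

.. code-block:: python

    from google.cloud.bigquery._helpers import _rows_from_json
    from google.cloud.bigquery.schema import SchemaField

    schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")]
    payload = [{"f": [{"v": "Alice"}, {"v": "33"}]}]  # one row, two cells
    rows = _rows_from_json(payload, schema)
    print(rows[0].name, rows[0].age)  # Alice 33
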
- - Returns: - List[:class:`~google.cloud.bigquery.Row`] - """ - from google.cloud.bigquery import Row - from google.cloud.bigquery.schema import _to_schema_fields - - schema = _to_schema_fields(schema) - field_to_index = _field_to_index_mapping(schema) - return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values] - - -def _int_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, int): - value = str(value) - return value - - -def _float_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - return value - - -def _decimal_to_json(value): - """Coerce 'value' to a JSON-compatible representation.""" - if isinstance(value, decimal.Decimal): - value = str(value) - return value - - -def _bool_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, bool): - value = "true" if value else "false" - return value - - -def _bytes_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, bytes): - value = base64.standard_b64encode(value).decode("ascii") - return value - - -def _timestamp_to_json_parameter(value): - """Coerce 'value' to an JSON-compatible representation. - - This version returns the string representation used in query parameters. - """ - if isinstance(value, datetime.datetime): - if value.tzinfo not in (None, UTC): - # Convert to UTC and remove the time zone info. - value = value.replace(tzinfo=None) - value.utcoffset() - value = "%s %s+00:00" % (value.date().isoformat(), value.time().isoformat()) - return value - - -def _timestamp_to_json_row(value): - """Coerce 'value' to an JSON-compatible representation. - - This version returns floating-point seconds value used in row data. - """ - if isinstance(value, datetime.datetime): - value = _microseconds_from_datetime(value) * 1e-6 - return value - - -def _datetime_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, datetime.datetime): - value = value.strftime(_RFC3339_MICROS_NO_ZULU) - return value - - -def _date_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, datetime.date): - value = value.isoformat() - return value - - -def _time_to_json(value): - """Coerce 'value' to an JSON-compatible representation.""" - if isinstance(value, datetime.time): - value = value.isoformat() - return value - - -# Converters used for scalar values marshalled as row data. -_SCALAR_VALUE_TO_JSON_ROW = { - "INTEGER": _int_to_json, - "INT64": _int_to_json, - "FLOAT": _float_to_json, - "FLOAT64": _float_to_json, - "NUMERIC": _decimal_to_json, - "BOOLEAN": _bool_to_json, - "BOOL": _bool_to_json, - "BYTES": _bytes_to_json, - "TIMESTAMP": _timestamp_to_json_row, - "DATETIME": _datetime_to_json, - "DATE": _date_to_json, - "TIME": _time_to_json, -} - - -# Converters used for scalar values marshalled as query parameters. -_SCALAR_VALUE_TO_JSON_PARAM = _SCALAR_VALUE_TO_JSON_ROW.copy() -_SCALAR_VALUE_TO_JSON_PARAM["TIMESTAMP"] = _timestamp_to_json_parameter - - -def _scalar_field_to_json(field, row_value): - """Maps a field and value to a JSON-safe value. - - Args: - field (google.cloud.bigquery.schema.SchemaField): - The SchemaField to use for type conversion and field name. - row_value (Any): - Value to be converted, based on the field's type. - - Returns: - Any: A JSON-serializable object. 
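The two timestamp serializers above differ on purpose: row data uses float seconds, query parameters use a string form. A quick sketch of the difference (private helpers, shown for illustration; a naive datetime is treated as UTC):

.. code-block:: python

    import datetime

    from google.cloud.bigquery._helpers import (
        _timestamp_to_json_parameter,
        _timestamp_to_json_row,
    )

    ts = datetime.datetime(2020, 2, 7, 9, 10, 44)
    print(_timestamp_to_json_row(ts))        # 1581066644.0 (float seconds)
    print(_timestamp_to_json_parameter(ts))  # '2020-02-07 09:10:44+00:00'
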
- """ - converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type) - if converter is None: # STRING doesn't need converting - return row_value - return converter(row_value) - - -def _repeated_field_to_json(field, row_value): - """Convert a repeated/array field to its JSON representation. - - Args: - field (google.cloud.bigquery.schema.SchemaField): - The SchemaField to use for type conversion and field name. The - field mode must equal ``REPEATED``. - row_value (Sequence[Any]): - A sequence of values to convert to JSON-serializable values. - - Returns: - List[Any]: A list of JSON-serializable objects. - """ - # Remove the REPEATED, but keep the other fields. This allows us to process - # each item as if it were a top-level field. - item_field = copy.deepcopy(field) - item_field._mode = "NULLABLE" - values = [] - for item in row_value: - values.append(_field_to_json(item_field, item)) - return values - - -def _record_field_to_json(fields, row_value): - """Convert a record/struct field to its JSON representation. - - Args: - fields (Sequence[google.cloud.bigquery.schema.SchemaField]): - The :class:`~google.cloud.bigquery.schema.SchemaField`s of the - record's subfields to use for type conversion and field names. - row_value (Union[Tuple[Any], Mapping[str, Any]): - A tuple or dictionary to convert to JSON-serializable values. - - Returns: - Mapping[str, Any]: A JSON-serializable dictionary. - """ - record = {} - isdict = isinstance(row_value, dict) - - for subindex, subfield in enumerate(fields): - subname = subfield.name - subvalue = row_value.get(subname) if isdict else row_value[subindex] - - # None values are unconditionally omitted - if subvalue is not None: - record[subname] = _field_to_json(subfield, subvalue) - - return record - - -def _field_to_json(field, row_value): - """Convert a field into JSON-serializable values. - - Args: - field (google.cloud.bigquery.schema.SchemaField): - The SchemaField to use for type conversion and field name. - - row_value (Union[Sequence[List], Any]): - Row data to be inserted. If the SchemaField's mode is - REPEATED, assume this is a list. If not, the type - is inferred from the SchemaField's field_type. - - Returns: - Any: A JSON-serializable object. - """ - if row_value is None: - return None - - if field.mode == "REPEATED": - return _repeated_field_to_json(field, row_value) - - if field.field_type == "RECORD": - return _record_field_to_json(field.fields, row_value) - - return _scalar_field_to_json(field, row_value) - - -def _snake_to_camel_case(value): - """Convert snake case string to camel case.""" - words = value.split("_") - return words[0] + "".join(map(str.capitalize, words[1:])) - - -def _get_sub_prop(container, keys, default=None): - """Get a nested value from a dictionary. - - This method works like ``dict.get(key)``, but for nested values. - - Arguments: - container (Dict): - A dictionary which may contain other dictionaries as values. - keys (Iterable): - A sequence of keys to attempt to get the value for. Each item in - the sequence represents a deeper nesting. The first key is for - the top level. If there is a dictionary there, the second key - attempts to get the value within that, and so on. - default (object): - (Optional) Value to returned if any of the keys are not found. - Defaults to ``None``. - - Examples: - Get a top-level value (equivalent to ``container.get('key')``). 
-
-        >>> _get_sub_prop({'key': 'value'}, ['key'])
-        'value'
-
-        Get a top-level value, providing a default (equivalent to
-        ``container.get('key', default='default')``).
-
-        >>> _get_sub_prop({'nothere': 123}, ['key'], default='not found')
-        'not found'
-
-        Get a nested value.
-
-        >>> _get_sub_prop({'key': {'subkey': 'value'}}, ['key', 'subkey'])
-        'value'
-
-    Returns:
-        object: The value if present or the default.
-    """
-    sub_val = container
-    for key in keys:
-        if key not in sub_val:
-            return default
-        sub_val = sub_val[key]
-    return sub_val
-
-
-def _set_sub_prop(container, keys, value):
-    """Set a nested value in a dictionary.
-
-    Arguments:
-        container (Dict):
-            A dictionary which may contain other dictionaries as values.
-        keys (Iterable):
-            A sequence of keys to attempt to set the value for. Each item in
-            the sequence represents a deeper nesting. The first key is for
-            the top level. If there is a dictionary there, the second key
-            attempts to get the value within that, and so on.
-        value (object): Value to set within the container.
-
-    Examples:
-        Set a top-level value (equivalent to ``container['key'] = 'value'``).
-
-        >>> container = {}
-        >>> _set_sub_prop(container, ['key'], 'value')
-        >>> container
-        {'key': 'value'}
-
-        Set a nested value.
-
-        >>> container = {}
-        >>> _set_sub_prop(container, ['key', 'subkey'], 'value')
-        >>> container
-        {'key': {'subkey': 'value'}}
-
-        Replace a nested value.
-
-        >>> container = {'key': {'subkey': 'prev'}}
-        >>> _set_sub_prop(container, ['key', 'subkey'], 'new')
-        >>> container
-        {'key': {'subkey': 'new'}}
-    """
-    sub_val = container
-    for key in keys[:-1]:
-        if key not in sub_val:
-            sub_val[key] = {}
-        sub_val = sub_val[key]
-    sub_val[keys[-1]] = value
-
-
-def _del_sub_prop(container, keys):
-    """Remove a nested key from a dictionary.
-
-    Arguments:
-        container (Dict):
-            A dictionary which may contain other dictionaries as values.
-        keys (Iterable):
-            A sequence of keys to attempt to clear the value for. Each item in
-            the sequence represents a deeper nesting. The first key is for
-            the top level. If there is a dictionary there, the second key
-            attempts to get the value within that, and so on.
-
-    Examples:
-        Remove a top-level value (equivalent to ``del container['key']``).
-
-        >>> container = {'key': 'value'}
-        >>> _del_sub_prop(container, ['key'])
-        >>> container
-        {}
-
-        Remove a nested value.
-
-        >>> container = {'key': {'subkey': 'value'}}
-        >>> _del_sub_prop(container, ['key', 'subkey'])
-        >>> container
-        {'key': {}}
-    """
-    sub_val = container
-    for key in keys[:-1]:
-        if key not in sub_val:
-            sub_val[key] = {}
-        sub_val = sub_val[key]
-    if keys[-1] in sub_val:
-        del sub_val[keys[-1]]
-
-
-def _int_or_none(value):
-    """Helper: deserialize int value from JSON string."""
-    if isinstance(value, int):
-        return value
-    if value is not None:
-        return int(value)
-
-
-def _str_or_none(value):
-    """Helper: serialize value to JSON string."""
-    if value is not None:
-        return str(value)
-
-
-def _split_id(full_id):
-    """Helper: split full_id into composite parts.
-
-    Args:
-        full_id (str): Fully-qualified ID in standard SQL format.
-
-    Returns:
-        List[str]: ID's parts separated into components. 
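These nested-property helpers are how job resources are manipulated internally; a sketch with keys that mirror the BigQuery REST job resource (private helpers; the key path is illustrative):

.. code-block:: python

    from google.cloud.bigquery._helpers import (
        _del_sub_prop,
        _get_sub_prop,
        _set_sub_prop,
    )

    resource = {}
    _set_sub_prop(resource, ["load", "destinationTable", "tableId"], "my_table")
    print(_get_sub_prop(resource, ["load", "destinationTable", "tableId"]))  # my_table
    _del_sub_prop(resource, ["load", "destinationTable", "tableId"])
    print(resource)  # {'load': {'destinationTable': {}}}
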
- """ - with_prefix = _PROJECT_PREFIX_PATTERN.match(full_id) - if with_prefix is None: - parts = full_id.split(".") - else: - parts = with_prefix.groups() - parts = [part for part in parts if part] - return parts - - -def _parse_3_part_id(full_id, default_project=None, property_name="table_id"): - output_project_id = default_project - output_dataset_id = None - output_resource_id = None - parts = _split_id(full_id) - - if len(parts) != 2 and len(parts) != 3: - raise ValueError( - "{property_name} must be a fully-qualified ID in " - 'standard SQL format, e.g., "project.dataset.{property_name}", ' - "got {}".format(full_id, property_name=property_name) - ) - - if len(parts) == 2 and not default_project: - raise ValueError( - "When default_project is not set, {property_name} must be a " - "fully-qualified ID in standard SQL format, " - 'e.g., "project.dataset_id.{property_name}", got {}'.format( - full_id, property_name=property_name - ) - ) - - if len(parts) == 2: - output_dataset_id, output_resource_id = parts - else: - output_project_id, output_dataset_id, output_resource_id = parts - - return output_project_id, output_dataset_id, output_resource_id - - -def _build_resource_from_properties(obj, filter_fields): - """Build a resource based on a ``_properties`` dictionary, filtered by - ``filter_fields``, which follow the name of the Python object. - """ - partial = {} - for filter_field in filter_fields: - api_field = obj._PROPERTY_TO_API_FIELD.get(filter_field) - if api_field is None and filter_field not in obj._properties: - raise ValueError("No property %s" % filter_field) - elif api_field is not None: - partial[api_field] = obj._properties.get(api_field) - else: - # allows properties that are not defined in the library - # and properties that have the same name as API resource key - partial[filter_field] = obj._properties[filter_field] - - return partial - - -def _verify_job_config_type(job_config, expected_type, param_name="job_config"): - if not isinstance(job_config, expected_type): - msg = ( - "Expected an instance of {expected_type} class for the {param_name} parameter, " - "but received {param_name} = {job_config}" - ) - raise TypeError( - msg.format( - expected_type=expected_type.__name__, - param_name=param_name, - job_config=job_config, - ) - ) diff --git a/bigquery/google/cloud/bigquery/_http.py b/bigquery/google/cloud/bigquery/_http.py deleted file mode 100644 index 2ff4effefb76..000000000000 --- a/bigquery/google/cloud/bigquery/_http.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Create / interact with Google BigQuery connections.""" - -from google.cloud import _http - -from google.cloud.bigquery import __version__ - - -class Connection(_http.JSONConnection): - """A connection to Google BigQuery via the JSON REST API. - - Args: - client (google.cloud.bigquery.client.Client): The client that owns the current connection. 
- - client_info (google.api_core.client_info.ClientInfo): (Optional) instance used to generate user agent. - """ - - DEFAULT_API_ENDPOINT = "https://bigquery.googleapis.com" - - def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): - super(Connection, self).__init__(client, client_info) - self.API_BASE_URL = api_endpoint - self._client_info.gapic_version = __version__ - self._client_info.client_library_version = __version__ - - API_VERSION = "v2" - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = "{api_base_url}/bigquery/{api_version}{path}" - """A template for the URL of a particular API call.""" diff --git a/bigquery/google/cloud/bigquery/_pandas_helpers.py b/bigquery/google/cloud/bigquery/_pandas_helpers.py deleted file mode 100644 index 645478ff6d4b..000000000000 --- a/bigquery/google/cloud/bigquery/_pandas_helpers.py +++ /dev/null @@ -1,744 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Shared helper functions for connecting BigQuery and pandas.""" - -import concurrent.futures -import functools -import logging -import warnings - -from six.moves import queue - -try: - from google.cloud import bigquery_storage_v1beta1 -except ImportError: # pragma: NO COVER - bigquery_storage_v1beta1 = None - -try: - import pandas -except ImportError: # pragma: NO COVER - pandas = None - -try: - import pyarrow - import pyarrow.parquet -except ImportError: # pragma: NO COVER - pyarrow = None - -from google.cloud.bigquery import schema - - -_LOGGER = logging.getLogger(__name__) - -_NO_BQSTORAGE_ERROR = ( - "The google-cloud-bigquery-storage library is not installed, " - "please install google-cloud-bigquery-storage to use bqstorage features." -) - -_PROGRESS_INTERVAL = 0.2 # Maximum time between download status checks, in seconds. - -_PANDAS_DTYPE_TO_BQ = { - "bool": "BOOLEAN", - "datetime64[ns, UTC]": "TIMESTAMP", - # BigQuery does not support uploading DATETIME values from Parquet files. - # See: https://github.com/googleapis/google-cloud-python/issues/9996 - "datetime64[ns]": "TIMESTAMP", - "float32": "FLOAT", - "float64": "FLOAT", - "int8": "INTEGER", - "int16": "INTEGER", - "int32": "INTEGER", - "int64": "INTEGER", - "uint8": "INTEGER", - "uint16": "INTEGER", - "uint32": "INTEGER", -} - - -class _DownloadState(object): - """Flag to indicate that a thread should exit early.""" - - def __init__(self): - # No need for a lock because reading/replacing a variable is defined to - # be an atomic operation in the Python language definition (enforced by - # the global interpreter lock). 
-        self.done = False
-
-
-def pyarrow_datetime():
-    return pyarrow.timestamp("us", tz=None)
-
-
-def pyarrow_numeric():
-    return pyarrow.decimal128(38, 9)
-
-
-def pyarrow_time():
-    return pyarrow.time64("us")
-
-
-def pyarrow_timestamp():
-    return pyarrow.timestamp("us", tz="UTC")
-
-
-if pyarrow:
-    # This dictionary is duplicated in bigquery_storage/test/unit/test_reader.py
-    # When modifying it be sure to update it there as well.
-    BQ_TO_ARROW_SCALARS = {
-        "BOOL": pyarrow.bool_,
-        "BOOLEAN": pyarrow.bool_,
-        "BYTES": pyarrow.binary,
-        "DATE": pyarrow.date32,
-        "DATETIME": pyarrow_datetime,
-        "FLOAT": pyarrow.float64,
-        "FLOAT64": pyarrow.float64,
-        "GEOGRAPHY": pyarrow.string,
-        "INT64": pyarrow.int64,
-        "INTEGER": pyarrow.int64,
-        "NUMERIC": pyarrow_numeric,
-        "STRING": pyarrow.string,
-        "TIME": pyarrow_time,
-        "TIMESTAMP": pyarrow_timestamp,
-    }
-    ARROW_SCALAR_IDS_TO_BQ = {
-        # https://arrow.apache.org/docs/python/api/datatypes.html#type-classes
-        pyarrow.bool_().id: "BOOL",
-        pyarrow.int8().id: "INT64",
-        pyarrow.int16().id: "INT64",
-        pyarrow.int32().id: "INT64",
-        pyarrow.int64().id: "INT64",
-        pyarrow.uint8().id: "INT64",
-        pyarrow.uint16().id: "INT64",
-        pyarrow.uint32().id: "INT64",
-        pyarrow.uint64().id: "INT64",
-        pyarrow.float16().id: "FLOAT64",
-        pyarrow.float32().id: "FLOAT64",
-        pyarrow.float64().id: "FLOAT64",
-        pyarrow.time32("ms").id: "TIME",
-        pyarrow.time64("ns").id: "TIME",
-        pyarrow.timestamp("ns").id: "TIMESTAMP",
-        pyarrow.date32().id: "DATE",
-        pyarrow.date64().id: "DATETIME",  # because millisecond resolution
-        pyarrow.binary().id: "BYTES",
-        pyarrow.string().id: "STRING",  # also alias for pyarrow.utf8()
-        pyarrow.decimal128(38, scale=9).id: "NUMERIC",
-        # The exact decimal's scale and precision are not important, as only
-        # the type ID matters, and it's the same for all decimal128 instances.
-    }
-
-else:  # pragma: NO COVER
-    BQ_TO_ARROW_SCALARS = {}  # pragma: NO COVER
-    ARROW_SCALAR_IDS_TO_BQ = {}  # pragma: NO COVER
-
-
-def bq_to_arrow_struct_data_type(field):
-    arrow_fields = []
-    for subfield in field.fields:
-        arrow_subfield = bq_to_arrow_field(subfield)
-        if arrow_subfield:
-            arrow_fields.append(arrow_subfield)
-        else:
-            # Could not determine a subfield type. Fallback to type
-            # inference.
-            return None
-    return pyarrow.struct(arrow_fields)
-
-
-def bq_to_arrow_data_type(field):
-    """Return the Arrow data type, corresponding to a given BigQuery column.
-
-    Returns:
-        None: if default Arrow type inspection should be used.
-    """
-    if field.mode is not None and field.mode.upper() == "REPEATED":
-        inner_type = bq_to_arrow_data_type(
-            schema.SchemaField(field.name, field.field_type, fields=field.fields)
-        )
-        if inner_type:
-            return pyarrow.list_(inner_type)
-        return None
-
-    field_type_upper = field.field_type.upper() if field.field_type else ""
-    if field_type_upper in schema._STRUCT_TYPES:
-        return bq_to_arrow_struct_data_type(field)
-
-    data_type_constructor = BQ_TO_ARROW_SCALARS.get(field_type_upper)
-    if data_type_constructor is None:
-        return None
-    return data_type_constructor()
-
-
-def bq_to_arrow_field(bq_field):
-    """Return the Arrow field, corresponding to a given BigQuery column.
-
-    Returns:
-        None: if the Arrow type cannot be determined. 
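A short sketch of what the mapping above produces for a scalar and a REPEATED column (private helpers; requires ``pyarrow``):

.. code-block:: python

    from google.cloud.bigquery._pandas_helpers import bq_to_arrow_data_type
    from google.cloud.bigquery.schema import SchemaField

    print(bq_to_arrow_data_type(SchemaField("created", "TIMESTAMP")))
    # timestamp[us, tz=UTC]
    print(bq_to_arrow_data_type(SchemaField("tags", "STRING", mode="REPEATED")))
    # list<item: string>
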
- """ - arrow_type = bq_to_arrow_data_type(bq_field) - if arrow_type: - is_nullable = bq_field.mode.upper() == "NULLABLE" - return pyarrow.field(bq_field.name, arrow_type, nullable=is_nullable) - - warnings.warn("Unable to determine type for field '{}'.".format(bq_field.name)) - return None - - -def bq_to_arrow_schema(bq_schema): - """Return the Arrow schema, corresponding to a given BigQuery schema. - - Returns: - None: if any Arrow type cannot be determined. - """ - arrow_fields = [] - for bq_field in bq_schema: - arrow_field = bq_to_arrow_field(bq_field) - if arrow_field is None: - # Auto-detect the schema if there is an unknown field type. - return None - arrow_fields.append(arrow_field) - return pyarrow.schema(arrow_fields) - - -def bq_to_arrow_array(series, bq_field): - arrow_type = bq_to_arrow_data_type(bq_field) - - field_type_upper = bq_field.field_type.upper() if bq_field.field_type else "" - - if bq_field.mode.upper() == "REPEATED": - return pyarrow.ListArray.from_pandas(series, type=arrow_type) - if field_type_upper in schema._STRUCT_TYPES: - return pyarrow.StructArray.from_pandas(series, type=arrow_type) - return pyarrow.Array.from_pandas(series, type=arrow_type) - - -def get_column_or_index(dataframe, name): - """Return a column or index as a pandas series.""" - if name in dataframe.columns: - return dataframe[name].reset_index(drop=True) - - if isinstance(dataframe.index, pandas.MultiIndex): - if name in dataframe.index.names: - return ( - dataframe.index.get_level_values(name) - .to_series() - .reset_index(drop=True) - ) - else: - if name == dataframe.index.name: - return dataframe.index.to_series().reset_index(drop=True) - - raise ValueError("column or index '{}' not found.".format(name)) - - -def list_columns_and_indexes(dataframe): - """Return all index and column names with dtypes. - - Returns: - Sequence[Tuple[str, dtype]]: - Returns a sorted list of indexes and column names with - corresponding dtypes. If an index is missing a name or has the - same name as a column, the index is omitted. - """ - column_names = frozenset(dataframe.columns) - columns_and_indexes = [] - if isinstance(dataframe.index, pandas.MultiIndex): - for name in dataframe.index.names: - if name and name not in column_names: - values = dataframe.index.get_level_values(name) - columns_and_indexes.append((name, values.dtype)) - else: - if dataframe.index.name and dataframe.index.name not in column_names: - columns_and_indexes.append((dataframe.index.name, dataframe.index.dtype)) - - columns_and_indexes += zip(dataframe.columns, dataframe.dtypes) - return columns_and_indexes - - -def dataframe_to_bq_schema(dataframe, bq_schema): - """Convert a pandas DataFrame schema to a BigQuery schema. - - Args: - dataframe (pandas.DataFrame): - DataFrame for which the client determines the BigQuery schema. - bq_schema (Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): - A BigQuery schema. Use this argument to override the autodetected - type for some or all of the DataFrame columns. - - Returns: - Optional[Sequence[google.cloud.bigquery.schema.SchemaField]]: - The automatically determined schema. Returns None if the type of - any column cannot be determined. - """ - if bq_schema: - bq_schema = schema._to_schema_fields(bq_schema) - for field in bq_schema: - if field.field_type in schema._STRUCT_TYPES: - raise ValueError( - "Uploading dataframes with struct (record) column types " - "is not supported. 
See: " - "https://github.com/googleapis/google-cloud-python/issues/8191" - ) - bq_schema_index = {field.name: field for field in bq_schema} - bq_schema_unused = set(bq_schema_index.keys()) - else: - bq_schema_index = {} - bq_schema_unused = set() - - bq_schema_out = [] - unknown_type_fields = [] - - for column, dtype in list_columns_and_indexes(dataframe): - # Use provided type from schema, if present. - bq_field = bq_schema_index.get(column) - if bq_field: - bq_schema_out.append(bq_field) - bq_schema_unused.discard(bq_field.name) - continue - - # Otherwise, try to automatically determine the type based on the - # pandas dtype. - bq_type = _PANDAS_DTYPE_TO_BQ.get(dtype.name) - bq_field = schema.SchemaField(column, bq_type) - bq_schema_out.append(bq_field) - - if bq_field.field_type is None: - unknown_type_fields.append(bq_field) - - # Catch any schema mismatch. The developer explicitly asked to serialize a - # column, but it was not found. - if bq_schema_unused: - raise ValueError( - u"bq_schema contains fields not present in dataframe: {}".format( - bq_schema_unused - ) - ) - - # If schema detection was not successful for all columns, also try with - # pyarrow, if available. - if unknown_type_fields: - if not pyarrow: - msg = u"Could not determine the type of columns: {}".format( - ", ".join(field.name for field in unknown_type_fields) - ) - warnings.warn(msg) - return None # We cannot detect the schema in full. - - # The augment_schema() helper itself will also issue unknown type - # warnings if detection still fails for any of the fields. - bq_schema_out = augment_schema(dataframe, bq_schema_out) - - return tuple(bq_schema_out) if bq_schema_out else None - - -def augment_schema(dataframe, current_bq_schema): - """Try to deduce the unknown field types and return an improved schema. - - This function requires ``pyarrow`` to run. If all the missing types still - cannot be detected, ``None`` is returned. If all types are already known, - a shallow copy of the given schema is returned. - - Args: - dataframe (pandas.DataFrame): - DataFrame for which some of the field types are still unknown. - current_bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): - A BigQuery schema for ``dataframe``. The types of some or all of - the fields may be ``None``. - Returns: - Optional[Sequence[google.cloud.bigquery.schema.SchemaField]] - """ - augmented_schema = [] - unknown_type_fields = [] - - for field in current_bq_schema: - if field.field_type is not None: - augmented_schema.append(field) - continue - - arrow_table = pyarrow.array(dataframe[field.name]) - detected_type = ARROW_SCALAR_IDS_TO_BQ.get(arrow_table.type.id) - - if detected_type is None: - unknown_type_fields.append(field) - continue - - new_field = schema.SchemaField( - name=field.name, - field_type=detected_type, - mode=field.mode, - description=field.description, - fields=field.fields, - ) - augmented_schema.append(new_field) - - if unknown_type_fields: - warnings.warn( - u"Pyarrow could not determine the type of columns: {}.".format( - ", ".join(field.name for field in unknown_type_fields) - ) - ) - return None - - return augmented_schema - - -def dataframe_to_arrow(dataframe, bq_schema): - """Convert pandas dataframe to Arrow table, using BigQuery schema. - - Args: - dataframe (pandas.DataFrame): - DataFrame to convert to Arrow table. - bq_schema (Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): - Desired BigQuery schema. 
The number of columns must match the number of columns in the DataFrame.
-
-    Returns:
-        pyarrow.Table:
-            Table containing dataframe data, with schema derived from
-            BigQuery schema.
-    """
-    column_names = set(dataframe.columns)
-    column_and_index_names = set(
-        name for name, _ in list_columns_and_indexes(dataframe)
-    )
-
-    bq_schema = schema._to_schema_fields(bq_schema)
-    bq_field_names = set(field.name for field in bq_schema)
-
-    extra_fields = bq_field_names - column_and_index_names
-    if extra_fields:
-        raise ValueError(
-            u"bq_schema contains fields not present in dataframe: {}".format(
-                extra_fields
-            )
-        )
-
-    # It's okay for indexes to be missing from bq_schema, but it's not okay to
-    # be missing columns.
-    missing_fields = column_names - bq_field_names
-    if missing_fields:
-        raise ValueError(
-            u"bq_schema is missing fields from dataframe: {}".format(missing_fields)
-        )
-
-    arrow_arrays = []
-    arrow_names = []
-    arrow_fields = []
-    for bq_field in bq_schema:
-        arrow_fields.append(bq_to_arrow_field(bq_field))
-        arrow_names.append(bq_field.name)
-        arrow_arrays.append(
-            bq_to_arrow_array(get_column_or_index(dataframe, bq_field.name), bq_field)
-        )
-
-    if all((field is not None for field in arrow_fields)):
-        return pyarrow.Table.from_arrays(
-            arrow_arrays, schema=pyarrow.schema(arrow_fields)
-        )
-    return pyarrow.Table.from_arrays(arrow_arrays, names=arrow_names)
-
-
-def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SNAPPY"):
-    """Write dataframe as a Parquet file, according to the desired BQ schema.
-
-    This function requires the :mod:`pyarrow` package. Arrow is used as an
-    intermediate format.
-
-    Args:
-        dataframe (pandas.DataFrame):
-            DataFrame to convert to Parquet file.
-        bq_schema (Sequence[Union[ \
-            :class:`~google.cloud.bigquery.schema.SchemaField`, \
-            Mapping[str, Any] \
-        ]]):
-            Desired BigQuery schema. Number of columns must match number of
-            columns in the DataFrame.
-        filepath (str):
-            Path to write Parquet file to.
-        parquet_compression (str):
-            (optional) The compression codec to use by the
-            ``pyarrow.parquet.write_table`` serializing method. Defaults to
-            "SNAPPY".
-            https://arrow.apache.org/docs/python/generated/pyarrow.parquet.write_table.html#pyarrow-parquet-write-table
-    """
-    if pyarrow is None:
-        raise ValueError("pyarrow is required for BigQuery schema conversion.")
-
-    bq_schema = schema._to_schema_fields(bq_schema)
-    arrow_table = dataframe_to_arrow(dataframe, bq_schema)
-    pyarrow.parquet.write_table(arrow_table, filepath, compression=parquet_compression)
-
-
-def _tabledata_list_page_to_arrow(page, column_names, arrow_types):
-    # Iterate over the page to force the API request to get the page data.
-    try:
-        next(iter(page))
-    except StopIteration:
-        pass
-
-    arrays = []
-    for column_index, arrow_type in enumerate(arrow_types):
-        arrays.append(pyarrow.array(page._columns[column_index], type=arrow_type))
-
-    if isinstance(column_names, pyarrow.Schema):
-        return pyarrow.RecordBatch.from_arrays(arrays, schema=column_names)
-    return pyarrow.RecordBatch.from_arrays(arrays, names=column_names)
-
-
-def download_arrow_tabledata_list(pages, bq_schema):
-    """Use tabledata.list to construct an iterable of RecordBatches.
-
-    Args:
-        pages (Iterator[:class:`google.api_core.page_iterator.Page`]):
-            An iterator over the result pages.
-        bq_schema (Sequence[Union[ \
-            :class:`~google.cloud.bigquery.schema.SchemaField`, \
-            Mapping[str, Any] \
-        ]]):
-            A description of the fields in result pages. 
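Pulling the pieces above together, a hedged sketch of schema inference plus the Parquet serialization path (private helpers; requires ``pandas`` and ``pyarrow``; the file path is illustrative):

.. code-block:: python

    import pandas

    from google.cloud.bigquery._pandas_helpers import (
        dataframe_to_bq_schema,
        dataframe_to_parquet,
    )

    df = pandas.DataFrame({"name": ["Alice", "Bob"], "age": [33, 40]})
    bq_schema = dataframe_to_bq_schema(df, None)
    # (SchemaField('name', 'STRING', ...), SchemaField('age', 'INTEGER', ...))
    dataframe_to_parquet(df, bq_schema, "/tmp/rows.parquet")  # SNAPPY by default
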
-
-    Yields:
-        :class:`pyarrow.RecordBatch`
-            The next page of records as a ``pyarrow`` record batch.
-    """
-    bq_schema = schema._to_schema_fields(bq_schema)
-    column_names = bq_to_arrow_schema(bq_schema) or [field.name for field in bq_schema]
-    arrow_types = [bq_to_arrow_data_type(field) for field in bq_schema]
-
-    for page in pages:
-        yield _tabledata_list_page_to_arrow(page, column_names, arrow_types)
-
-
-def _tabledata_list_page_to_dataframe(page, column_names, dtypes):
-    # Iterate over the page to force the API request to get the page data.
-    try:
-        next(iter(page))
-    except StopIteration:
-        pass
-
-    columns = {}
-    for column_index, column_name in enumerate(column_names):
-        dtype = dtypes.get(column_name)
-        columns[column_name] = pandas.Series(page._columns[column_index], dtype=dtype)
-
-    return pandas.DataFrame(columns, columns=column_names)
-
-
-def download_dataframe_tabledata_list(pages, bq_schema, dtypes):
-    """Use (slower, but free) tabledata.list to construct a DataFrame.
-
-    Args:
-        pages (Iterator[:class:`google.api_core.page_iterator.Page`]):
-            An iterator over the result pages.
-        bq_schema (Sequence[Union[ \
-            :class:`~google.cloud.bigquery.schema.SchemaField`, \
-            Mapping[str, Any] \
-        ]]):
-            A description of the fields in result pages.
-        dtypes(Mapping[str, numpy.dtype]):
-            The types of columns in result data to hint construction of the
-            resulting DataFrame. Not all column types have to be specified.
-    Yields:
-        :class:`pandas.DataFrame`
-            The next page of records as a ``pandas.DataFrame`` record batch.
-    """
-    bq_schema = schema._to_schema_fields(bq_schema)
-    column_names = [field.name for field in bq_schema]
-    for page in pages:
-        yield _tabledata_list_page_to_dataframe(page, column_names, dtypes)
-
-
-def _bqstorage_page_to_arrow(page):
-    return page.to_arrow()
-
-
-def _bqstorage_page_to_dataframe(column_names, dtypes, page):
-    # page.to_dataframe() does not preserve column order in some versions
-    # of google-cloud-bigquery-storage. Access by column name to rearrange.
-    return page.to_dataframe(dtypes=dtypes)[column_names]
-
-
-def _download_table_bqstorage_stream(
-    download_state, bqstorage_client, session, stream, worker_queue, page_to_item
-):
-    position = bigquery_storage_v1beta1.types.StreamPosition(stream=stream)
-    rowstream = bqstorage_client.read_rows(position).rows(session)
-
-    for page in rowstream.pages:
-        if download_state.done:
-            return
-        item = page_to_item(page)
-        worker_queue.put(item)
-
-
-def _nowait(futures):
-    """Separate finished and unfinished threads, much like
-    :func:`concurrent.futures.wait`, but don't wait.
-    """
-    done = []
-    not_done = []
-    for future in futures:
-        if future.done():
-            done.append(future)
-        else:
-            not_done.append(future)
-    return done, not_done
-
-
-def _download_table_bqstorage(
-    project_id,
-    table,
-    bqstorage_client,
-    preserve_order=False,
-    selected_fields=None,
-    page_to_item=None,
-):
-    """Use (faster, but billable) BQ Storage API to construct DataFrame."""
-    if "$" in table.table_id:
-        raise ValueError(
-            "Reading from a specific partition is not currently supported."
-        )
-    if "@" in table.table_id:
-        raise ValueError("Reading from a specific snapshot is not currently supported.")
-
-    read_options = bigquery_storage_v1beta1.types.TableReadOptions()
-    if selected_fields is not None:
-        for field in selected_fields:
-            read_options.selected_fields.append(field.name)
-
-    requested_streams = 0
-    if preserve_order:
-        requested_streams = 1
-
-    session = bqstorage_client.create_read_session(
-        table.to_bqstorage(),
-        "projects/{}".format(project_id),
-        format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW,
-        read_options=read_options,
-        requested_streams=requested_streams,
-    )
-    _LOGGER.debug(
-        "Started reading table '{}.{}.{}' with BQ Storage API session '{}'.".format(
-            table.project, table.dataset_id, table.table_id, session.name
-        )
-    )
-
-    # Avoid reading rows from an empty table.
-    if not session.streams:
-        return
-
-    total_streams = len(session.streams)
-
-    # Use _DownloadState to notify worker threads when to quit.
-    # See: https://stackoverflow.com/a/29237343/101923
-    download_state = _DownloadState()
-
-    # Create a queue to collect frames as they are created in each thread.
-    worker_queue = queue.Queue()
-
-    with concurrent.futures.ThreadPoolExecutor(max_workers=total_streams) as pool:
-        try:
-            # Manually submit jobs and wait for download to complete rather
-            # than using pool.map because pool.map continues running in the
-            # background even if there is an exception on the main thread.
-            # See: https://github.com/googleapis/google-cloud-python/pull/7698
-            not_done = [
-                pool.submit(
-                    _download_table_bqstorage_stream,
-                    download_state,
-                    bqstorage_client,
-                    session,
-                    stream,
-                    worker_queue,
-                    page_to_item,
-                )
-                for stream in session.streams
-            ]
-
-            while not_done:
-                # Don't block on the worker threads. For performance reasons,
-                # we want to block on the queue's get method, instead. This
-                # prevents the queue from filling up, because the main thread
-                # has smaller gaps in time between calls to the queue's get
-                # method. For a detailed explanation, see:
-                # https://friendliness.dev/2019/06/18/python-nowait/
-                done, not_done = _nowait(not_done)
-                for future in done:
-                    # Call result() on any finished threads to raise any
-                    # exceptions encountered.
-                    future.result()
-
-                try:
-                    frame = worker_queue.get(timeout=_PROGRESS_INTERVAL)
-                    yield frame
-                except queue.Empty:  # pragma: NO COVER
-                    continue
-
-            # Return any remaining values after the workers finished.
-            while not worker_queue.empty():  # pragma: NO COVER
-                try:
-                    # Include a timeout because even though the queue is
-                    # non-empty, it doesn't guarantee that a subsequent call to
-                    # get() will not block.
-                    frame = worker_queue.get(timeout=_PROGRESS_INTERVAL)
-                    yield frame
-                except queue.Empty:  # pragma: NO COVER
-                    continue
-        finally:
-            # No need for a lock because reading/replacing a variable is
-            # defined to be an atomic operation in the Python language
-            # definition (enforced by the global interpreter lock).
-            download_state.done = True
-
-            # Shutdown all background threads, now that they should know to
-            # exit early. 
- pool.shutdown(wait=True) - - -def download_arrow_bqstorage( - project_id, table, bqstorage_client, preserve_order=False, selected_fields=None -): - return _download_table_bqstorage( - project_id, - table, - bqstorage_client, - preserve_order=preserve_order, - selected_fields=selected_fields, - page_to_item=_bqstorage_page_to_arrow, - ) - - -def download_dataframe_bqstorage( - project_id, - table, - bqstorage_client, - column_names, - dtypes, - preserve_order=False, - selected_fields=None, -): - page_to_item = functools.partial(_bqstorage_page_to_dataframe, column_names, dtypes) - return _download_table_bqstorage( - project_id, - table, - bqstorage_client, - preserve_order=preserve_order, - selected_fields=selected_fields, - page_to_item=page_to_item, - ) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py deleted file mode 100644 index 5da12990b390..000000000000 --- a/bigquery/google/cloud/bigquery/client.py +++ /dev/null @@ -1,2928 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Client for interacting with the Google BigQuery API.""" - -from __future__ import absolute_import -from __future__ import division - -try: - from collections import abc as collections_abc -except ImportError: # Python 2.7 - import collections as collections_abc - -import concurrent.futures -import copy -import functools -import gzip -import io -import itertools -import json -import math -import os -import tempfile -import uuid -import warnings - -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None -import six - -from google import resumable_media -from google.resumable_media.requests import MultipartUpload -from google.resumable_media.requests import ResumableUpload - -import google.api_core.client_options -import google.api_core.exceptions -from google.api_core import page_iterator -from google.auth.transport.requests import TimeoutGuard -import google.cloud._helpers -from google.cloud import exceptions -from google.cloud.client import ClientWithProject - -from google.cloud.bigquery._helpers import _record_field_to_json -from google.cloud.bigquery._helpers import _str_or_none -from google.cloud.bigquery._helpers import _verify_job_config_type -from google.cloud.bigquery._http import Connection -from google.cloud.bigquery import _pandas_helpers -from google.cloud.bigquery.dataset import Dataset -from google.cloud.bigquery.dataset import DatasetListItem -from google.cloud.bigquery.dataset import DatasetReference -from google.cloud.bigquery import job -from google.cloud.bigquery.model import Model -from google.cloud.bigquery.model import ModelReference -from google.cloud.bigquery.query import _QueryResults -from google.cloud.bigquery.retry import DEFAULT_RETRY -from google.cloud.bigquery.routine import Routine -from google.cloud.bigquery.routine import RoutineReference -from google.cloud.bigquery.schema import SchemaField -from google.cloud.bigquery.table import _table_arg_to_table -from 
google.cloud.bigquery.table import _table_arg_to_table_ref -from google.cloud.bigquery.table import Table -from google.cloud.bigquery.table import TableListItem -from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.table import RowIterator - - -_DEFAULT_CHUNKSIZE = 1048576 # 1024 * 1024 B = 1 MB -_MAX_MULTIPART_SIZE = 5 * 1024 * 1024 -_DEFAULT_NUM_RETRIES = 6 -_BASE_UPLOAD_TEMPLATE = ( - u"https://bigquery.googleapis.com/upload/bigquery/v2/projects/" - u"{project}/jobs?uploadType=" -) -_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" -_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable" -_GENERIC_CONTENT_TYPE = u"*/*" -_READ_LESS_THAN_SIZE = ( - "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining." -) -_NEED_TABLE_ARGUMENT = ( - "The table argument should be a table ID string, Table, or TableReference" -) - - -class Project(object): - """Wrapper for resource describing a BigQuery project. - - Args: - project_id (str): Opaque ID of the project - - numeric_id (int): Numeric ID of the project - - friendly_name (str): Display name of the project - """ - - def __init__(self, project_id, numeric_id, friendly_name): - self.project_id = project_id - self.numeric_id = numeric_id - self.friendly_name = friendly_name - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct an instance from a resource dict.""" - return cls(resource["id"], resource["numericId"], resource["friendlyName"]) - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - Args: - project (str): - Project ID for the project which the client acts on behalf of. - Will be passed when creating a dataset / job. If not passed, - falls back to the default inferred from the environment. - credentials (google.auth.credentials.Credentials): - (Optional) The OAuth2 Credentials to use for this client. If not - passed (and if no ``_http`` object is passed), falls back to the - default inferred from the environment. - _http (requests.Session): - (Optional) HTTP object to make requests. Can be any object that - defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an ``_http`` - object is created that is bound to the ``credentials`` for the - current object. - This parameter should be considered private, and could change in - the future. - location (str): - (Optional) Default location for jobs / datasets / tables. - default_query_job_config (google.cloud.bigquery.job.QueryJobConfig): - (Optional) Default ``QueryJobConfig``. - Will be merged into job configs passed into the ``query`` method. - client_info (google.api_core.client_info.ClientInfo): - The client info used to send a user-agent string along with API - requests. If ``None``, then default info will be used. Generally, - you only need to set this if you're developing your own library - or partner tool. - client_options (Union[google.api_core.client_options.ClientOptions, Dict]): - (Optional) Client options used to set user options on the client. - API Endpoint should be set through client_options. - - Raises: - google.auth.exceptions.DefaultCredentialsError: - Raised if ``credentials`` is not specified and the library fails - to acquire default credentials. 
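For context, a usage sketch of the constructor (hypothetical project ID; the dict form of ``client_options`` is converted with ``from_dict`` in ``__init__`` below)::

    from google.cloud import bigquery

    client = bigquery.Client(
        project="my-project",  # hypothetical project ID
        location="US",         # default location for jobs / datasets / tables
        client_options={"api_endpoint": "https://bigquery.googleapis.com"},
    )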
- """ - - SCOPE = ( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/cloud-platform", - ) - """The scopes required for authenticating as a BigQuery consumer.""" - - def __init__( - self, - project=None, - credentials=None, - _http=None, - location=None, - default_query_job_config=None, - client_info=None, - client_options=None, - ): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http - ) - - kw_args = {"client_info": client_info} - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - kw_args["api_endpoint"] = api_endpoint - - self._connection = Connection(self, **kw_args) - self._location = location - self._default_query_job_config = copy.deepcopy(default_query_job_config) - - @property - def location(self): - """Default location for jobs / datasets / tables.""" - return self._location - - def close(self): - """Close the underlying transport objects, releasing system resources. - - .. note:: - - The client instance can be used for making additional requests even - after closing, in which case the underlying connections are - automatically re-created. - """ - self._http._auth_request.session.close() - self._http.close() - - def get_service_account_email( - self, project=None, retry=DEFAULT_RETRY, timeout=None - ): - """Get the email address of the project's BigQuery service account - - Note: - This is the service account that BigQuery uses to manage tables - encrypted by a key in KMS. - - Args: - project (str, optional): - Project ID to use for retreiving service account email. - Defaults to the client's project. - retry (Optional[google.api_core.retry.Retry]): How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - str: service account email address - - Example: - - >>> from google.cloud import bigquery - >>> client = bigquery.Client() - >>> client.get_service_account_email() - my_service_account@my-project.iam.gserviceaccount.com - - """ - if project is None: - project = self.project - path = "/projects/%s/serviceAccount" % (project,) - - api_response = self._call_api(retry, method="GET", path=path, timeout=timeout) - return api_response["email"] - - def list_projects( - self, max_results=None, page_token=None, retry=DEFAULT_RETRY, timeout=None - ): - """List projects for the project associated with this client. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list - - Args: - max_results (int): - (Optional) maximum number of projects to return, - If not passed, defaults to a value set by the API. - - page_token (str): - (Optional) Token representing a cursor into the projects. If - not passed, the API will return the first page of projects. - The token marks the beginning of the iterator to be returned - and the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - - retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.api_core.page_iterator.Iterator: - Iterator of :class:`~google.cloud.bigquery.client.Project` - accessible to the current client. 
- """ - return page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path="/projects", - item_to_value=_item_to_project, - items_key="projects", - page_token=page_token, - max_results=max_results, - ) - - def list_datasets( - self, - project=None, - include_all=False, - filter=None, - max_results=None, - page_token=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """List datasets for the project associated with this client. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list - - Args: - project (str): - Optional. Project ID to use for retreiving datasets. Defaults - to the client's project. - include_all (bool): - Optional. True if results include hidden datasets. Defaults - to False. - filter (str): - Optional. An expression for filtering the results by label. - For syntax, see - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#body.QUERY_PARAMETERS.filter - max_results (int): - Optional. Maximum number of datasets to return. - page_token (str): - Optional. Token representing a cursor into the datasets. If - not passed, the API will return the first page of datasets. - The token marks the beginning of the iterator to be returned - and the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (google.api_core.retry.Retry): - Optional. How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.api_core.page_iterator.Iterator: - Iterator of :class:`~google.cloud.bigquery.dataset.DatasetListItem`. - associated with the project. - """ - extra_params = {} - if project is None: - project = self.project - if include_all: - extra_params["all"] = True - if filter: - # TODO: consider supporting a dict of label -> value for filter, - # and converting it into a string here. - extra_params["filter"] = filter - path = "/projects/%s/datasets" % (project,) - return page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path=path, - item_to_value=_item_to_dataset, - items_key="datasets", - page_token=page_token, - max_results=max_results, - extra_params=extra_params, - ) - - def dataset(self, dataset_id, project=None): - """Deprecated: Construct a reference to a dataset. - - .. deprecated:: 1.24.0 - Construct a - :class:`~google.cloud.bigquery.dataset.DatasetReference` using its - constructor or use a string where previously a reference object - was used. - - As of ``google-cloud-bigquery`` version 1.7.0, all client methods - that take a - :class:`~google.cloud.bigquery.dataset.DatasetReference` or - :class:`~google.cloud.bigquery.table.TableReference` also take a - string in standard SQL format, e.g. ``project.dataset_id`` or - ``project.dataset_id.table_id``. - - Args: - dataset_id (str): ID of the dataset. - - project (str): - (Optional) project ID for the dataset (defaults to - the project of the client). - - Returns: - google.cloud.bigquery.dataset.DatasetReference: - a new ``DatasetReference`` instance. - """ - if project is None: - project = self.project - - warnings.warn( - "Client.dataset is deprecated and will be removed in a future version. 
" - "Use a string like 'my_project.my_dataset' or a " - "cloud.google.bigquery.DatasetReference object, instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - return DatasetReference(project, dataset_id) - - def _create_bqstorage_client(self): - """Create a BigQuery Storage API client using this client's credentials. - - Returns: - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient: - A BigQuery Storage API client. - """ - from google.cloud import bigquery_storage_v1beta1 - - return bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=self._credentials - ) - - def create_dataset( - self, dataset, exists_ok=False, retry=DEFAULT_RETRY, timeout=None - ): - """API call: create the dataset via a POST request. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert - - Args: - dataset (Union[ \ - google.cloud.bigquery.dataset.Dataset, \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A :class:`~google.cloud.bigquery.dataset.Dataset` to create. - If ``dataset`` is a reference, an empty dataset is created - with the specified ID and client's default location. - exists_ok (bool): - Defaults to ``False``. If ``True``, ignore "already exists" - errors when creating the dataset. - retry (google.api_core.retry.Retry): - Optional. How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.dataset.Dataset: - A new ``Dataset`` returned from the API. - - Example: - - >>> from google.cloud import bigquery - >>> client = bigquery.Client() - >>> dataset = bigquery.Dataset('my_project.my_dataset') - >>> dataset = client.create_dataset(dataset) - - """ - if isinstance(dataset, str): - dataset = DatasetReference.from_string( - dataset, default_project=self.project - ) - if isinstance(dataset, DatasetReference): - dataset = Dataset(dataset) - - path = "/projects/%s/datasets" % (dataset.project,) - - data = dataset.to_api_repr() - if data.get("location") is None and self.location is not None: - data["location"] = self.location - - try: - api_response = self._call_api( - retry, method="POST", path=path, data=data, timeout=timeout - ) - return Dataset.from_api_repr(api_response) - except google.api_core.exceptions.Conflict: - if not exists_ok: - raise - return self.get_dataset(dataset.reference, retry=retry) - - def create_routine( - self, routine, exists_ok=False, retry=DEFAULT_RETRY, timeout=None - ): - """[Beta] Create a routine via a POST request. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert - - Args: - routine (google.cloud.bigquery.routine.Routine): - A :class:`~google.cloud.bigquery.routine.Routine` to create. - The dataset that the routine belongs to must already exist. - exists_ok (bool): - Defaults to ``False``. If ``True``, ignore "already exists" - errors when creating the routine. - retry (google.api_core.retry.Retry): - Optional. How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.routine.Routine: - A new ``Routine`` returned from the service. 
- """ - reference = routine.reference - path = "/projects/{}/datasets/{}/routines".format( - reference.project, reference.dataset_id - ) - resource = routine.to_api_repr() - try: - api_response = self._call_api( - retry, method="POST", path=path, data=resource, timeout=timeout - ) - return Routine.from_api_repr(api_response) - except google.api_core.exceptions.Conflict: - if not exists_ok: - raise - return self.get_routine(routine.reference, retry=retry) - - def create_table(self, table, exists_ok=False, retry=DEFAULT_RETRY, timeout=None): - """API call: create a table via a PUT request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - A :class:`~google.cloud.bigquery.table.Table` to create. - If ``table`` is a reference, an empty table is created - with the specified ID. The dataset that the table belongs to - must already exist. - exists_ok (bool): - Defaults to ``False``. If ``True``, ignore "already exists" - errors when creating the table. - retry (google.api_core.retry.Retry): - Optional. How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.table.Table: - A new ``Table`` returned from the service. - """ - table = _table_arg_to_table(table, default_project=self.project) - - path = "/projects/%s/datasets/%s/tables" % (table.project, table.dataset_id) - data = table.to_api_repr() - try: - api_response = self._call_api( - retry, method="POST", path=path, data=data, timeout=timeout - ) - return Table.from_api_repr(api_response) - except google.api_core.exceptions.Conflict: - if not exists_ok: - raise - return self.get_table(table.reference, retry=retry) - - def _call_api(self, retry, **kwargs): - call = functools.partial(self._connection.api_request, **kwargs) - if retry: - call = retry(call) - return call() - - def get_dataset(self, dataset_ref, retry=DEFAULT_RETRY, timeout=None): - """Fetch the dataset referenced by ``dataset_ref`` - - Args: - dataset_ref (Union[ \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A reference to the dataset to fetch from the BigQuery API. - If a string is passed in, this method attempts to create a - dataset reference from a string using - :func:`~google.cloud.bigquery.dataset.DatasetReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.dataset.Dataset: - A ``Dataset`` instance. - """ - if isinstance(dataset_ref, str): - dataset_ref = DatasetReference.from_string( - dataset_ref, default_project=self.project - ) - - api_response = self._call_api( - retry, method="GET", path=dataset_ref.path, timeout=timeout - ) - return Dataset.from_api_repr(api_response) - - def get_model(self, model_ref, retry=DEFAULT_RETRY, timeout=None): - """[Beta] Fetch the model referenced by ``model_ref``. - - Args: - model_ref (Union[ \ - google.cloud.bigquery.model.ModelReference, \ - str, \ - ]): - A reference to the model to fetch from the BigQuery API. - If a string is passed in, this method attempts to create a - model reference from a string using - :func:`google.cloud.bigquery.model.ModelReference.from_string`. 
- retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.model.Model: A ``Model`` instance. - """ - if isinstance(model_ref, str): - model_ref = ModelReference.from_string( - model_ref, default_project=self.project - ) - - api_response = self._call_api( - retry, method="GET", path=model_ref.path, timeout=timeout - ) - return Model.from_api_repr(api_response) - - def get_routine(self, routine_ref, retry=DEFAULT_RETRY, timeout=None): - """[Beta] Get the routine referenced by ``routine_ref``. - - Args: - routine_ref (Union[ \ - google.cloud.bigquery.routine.Routine, \ - google.cloud.bigquery.routine.RoutineReference, \ - str, \ - ]): - A reference to the routine to fetch from the BigQuery API. If - a string is passed in, this method attempts to create a - reference from a string using - :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the API call. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.routine.Routine: - A ``Routine`` instance. - """ - if isinstance(routine_ref, str): - routine_ref = RoutineReference.from_string( - routine_ref, default_project=self.project - ) - - api_response = self._call_api( - retry, method="GET", path=routine_ref.path, timeout=timeout - ) - return Routine.from_api_repr(api_response) - - def get_table(self, table, retry=DEFAULT_RETRY, timeout=None): - """Fetch the table referenced by ``table``. - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - A reference to the table to fetch from the BigQuery API. - If a string is passed in, this method attempts to create a - table reference from a string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.table.Table: - A ``Table`` instance. - """ - table_ref = _table_arg_to_table_ref(table, default_project=self.project) - api_response = self._call_api( - retry, method="GET", path=table_ref.path, timeout=timeout - ) - return Table.from_api_repr(api_response) - - def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY, timeout=None): - """Change some fields of a dataset. - - Use ``fields`` to specify which fields to update. At least one field - must be provided. If a field is listed in ``fields`` and is ``None`` in - ``dataset``, it will be deleted. - - If ``dataset.etag`` is not ``None``, the update will only - succeed if the dataset on the server has the same ETag. Thus - reading a dataset with ``get_dataset``, changing its fields, - and then passing it to ``update_dataset`` will ensure that the changes - will only be saved if no modifications to the dataset occurred - since the read. - - Args: - dataset (google.cloud.bigquery.dataset.Dataset): - The dataset to update. - fields (Sequence[str]): - The properties of ``dataset`` to change (e.g. "friendly_name"). - retry (google.api_core.retry.Retry, optional): - How to retry the RPC. 
- timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.dataset.Dataset: - The modified ``Dataset`` instance. - """ - partial = dataset._build_resource(fields) - if dataset.etag is not None: - headers = {"If-Match": dataset.etag} - else: - headers = None - api_response = self._call_api( - retry, - method="PATCH", - path=dataset.path, - data=partial, - headers=headers, - timeout=timeout, - ) - return Dataset.from_api_repr(api_response) - - def update_model(self, model, fields, retry=DEFAULT_RETRY, timeout=None): - """[Beta] Change some fields of a model. - - Use ``fields`` to specify which fields to update. At least one field - must be provided. If a field is listed in ``fields`` and is ``None`` - in ``model``, the field value will be deleted. - - If ``model.etag`` is not ``None``, the update will only succeed if - the model on the server has the same ETag. Thus reading a model with - ``get_model``, changing its fields, and then passing it to - ``update_model`` will ensure that the changes will only be saved if - no modifications to the model occurred since the read. - - Args: - model (google.cloud.bigquery.model.Model): The model to update. - fields (Sequence[str]): - The fields of ``model`` to change, spelled as the Model - properties (e.g. "friendly_name"). - retry (google.api_core.retry.Retry): - (Optional) A description of how to retry the API call. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.model.Model: - The model resource returned from the API call. - """ - partial = model._build_resource(fields) - if model.etag: - headers = {"If-Match": model.etag} - else: - headers = None - api_response = self._call_api( - retry, - method="PATCH", - path=model.path, - data=partial, - headers=headers, - timeout=timeout, - ) - return Model.from_api_repr(api_response) - - def update_routine(self, routine, fields, retry=DEFAULT_RETRY, timeout=None): - """[Beta] Change some fields of a routine. - - Use ``fields`` to specify which fields to update. At least one field - must be provided. If a field is listed in ``fields`` and is ``None`` - in ``routine``, the field value will be deleted. - - .. warning:: - During beta, partial updates are not supported. You must provide - all fields in the resource. - - If :attr:`~google.cloud.bigquery.routine.Routine.etag` is not - ``None``, the update will only succeed if the resource on the server - has the same ETag. Thus reading a routine with - :func:`~google.cloud.bigquery.client.Client.get_routine`, changing - its fields, and then passing it to this method will ensure that the - changes will only be saved if no modifications to the resource - occurred since the read. - - Args: - routine (google.cloud.bigquery.routine.Routine): The routine to update. - fields (Sequence[str]): - The fields of ``routine`` to change, spelled as the - :class:`~google.cloud.bigquery.routine.Routine` properties - (e.g. ``type_``). - retry (google.api_core.retry.Retry): - (Optional) A description of how to retry the API call. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.routine.Routine: - The routine resource returned from the API call. 
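The ETag-guarded read-modify-write pattern described above, sketched for a dataset (hypothetical dataset ID)::

    from google.cloud import bigquery

    client = bigquery.Client()
    dataset = client.get_dataset("my-project.my_dataset")  # hypothetical
    dataset.description = "Nightly snapshots"
    # Only the listed fields are sent; the ETag from get_dataset guards
    # against clobbering concurrent modifications.
    dataset = client.update_dataset(dataset, ["description"])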
- """ - partial = routine._build_resource(fields) - if routine.etag: - headers = {"If-Match": routine.etag} - else: - headers = None - - # TODO: remove when routines update supports partial requests. - partial["routineReference"] = routine.reference.to_api_repr() - - api_response = self._call_api( - retry, - method="PUT", - path=routine.path, - data=partial, - headers=headers, - timeout=timeout, - ) - return Routine.from_api_repr(api_response) - - def update_table(self, table, fields, retry=DEFAULT_RETRY, timeout=None): - """Change some fields of a table. - - Use ``fields`` to specify which fields to update. At least one field - must be provided. If a field is listed in ``fields`` and is ``None`` - in ``table``, the field value will be deleted. - - If ``table.etag`` is not ``None``, the update will only succeed if - the table on the server has the same ETag. Thus reading a table with - ``get_table``, changing its fields, and then passing it to - ``update_table`` will ensure that the changes will only be saved if - no modifications to the table occurred since the read. - - Args: - table (google.cloud.bigquery.table.Table): The table to update. - fields (Sequence[str]): - The fields of ``table`` to change, spelled as the Table - properties (e.g. "friendly_name"). - retry (google.api_core.retry.Retry): - (Optional) A description of how to retry the API call. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.table.Table: - The table resource returned from the API call. - """ - partial = table._build_resource(fields) - if table.etag is not None: - headers = {"If-Match": table.etag} - else: - headers = None - api_response = self._call_api( - retry, - method="PATCH", - path=table.path, - data=partial, - headers=headers, - timeout=timeout, - ) - return Table.from_api_repr(api_response) - - def list_models( - self, - dataset, - max_results=None, - page_token=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """[Beta] List models in the dataset. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models/list - - Args: - dataset (Union[ \ - google.cloud.bigquery.dataset.Dataset, \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A reference to the dataset whose models to list from the - BigQuery API. If a string is passed in, this method attempts - to create a dataset reference from a string using - :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`. - max_results (int): - (Optional) Maximum number of models to return. If not passed, - defaults to a value set by the API. - page_token (str): - (Optional) Token representing a cursor into the models. If - not passed, the API will return the first page of models. The - token marks the beginning of the iterator to be returned and - the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.api_core.page_iterator.Iterator: - Iterator of - :class:`~google.cloud.bigquery.model.Model` contained - within the requested dataset. 
- """ - if isinstance(dataset, str): - dataset = DatasetReference.from_string( - dataset, default_project=self.project - ) - - if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError("dataset must be a Dataset, DatasetReference, or string") - - path = "%s/models" % dataset.path - result = page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path=path, - item_to_value=_item_to_model, - items_key="models", - page_token=page_token, - max_results=max_results, - ) - result.dataset = dataset - return result - - def list_routines( - self, - dataset, - max_results=None, - page_token=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """[Beta] List routines in the dataset. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/list - - Args: - dataset (Union[ \ - google.cloud.bigquery.dataset.Dataset, \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A reference to the dataset whose routines to list from the - BigQuery API. If a string is passed in, this method attempts - to create a dataset reference from a string using - :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`. - max_results (int): - (Optional) Maximum number of routines to return. If not passed, - defaults to a value set by the API. - page_token (str): - (Optional) Token representing a cursor into the routines. If - not passed, the API will return the first page of routines. The - token marks the beginning of the iterator to be returned and - the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.api_core.page_iterator.Iterator: - Iterator of all - :class:`~google.cloud.bigquery.routine.Routine`s contained - within the requested dataset, limited by ``max_results``. - """ - if isinstance(dataset, str): - dataset = DatasetReference.from_string( - dataset, default_project=self.project - ) - - if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError("dataset must be a Dataset, DatasetReference, or string") - - path = "{}/routines".format(dataset.path) - result = page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path=path, - item_to_value=_item_to_routine, - items_key="routines", - page_token=page_token, - max_results=max_results, - ) - result.dataset = dataset - return result - - def list_tables( - self, - dataset, - max_results=None, - page_token=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """List tables in the dataset. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list - - Args: - dataset (Union[ \ - google.cloud.bigquery.dataset.Dataset, \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A reference to the dataset whose tables to list from the - BigQuery API. If a string is passed in, this method attempts - to create a dataset reference from a string using - :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`. - max_results (int): - (Optional) Maximum number of tables to return. If not passed, - defaults to a value set by the API. - page_token (str): - (Optional) Token representing a cursor into the tables. 
If - not passed, the API will return the first page of tables. The - token marks the beginning of the iterator to be returned and - the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.api_core.page_iterator.Iterator: - Iterator of - :class:`~google.cloud.bigquery.table.TableListItem` contained - within the requested dataset. - """ - if isinstance(dataset, str): - dataset = DatasetReference.from_string( - dataset, default_project=self.project - ) - - if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError("dataset must be a Dataset, DatasetReference, or string") - - path = "%s/tables" % dataset.path - result = page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path=path, - item_to_value=_item_to_table, - items_key="tables", - page_token=page_token, - max_results=max_results, - ) - result.dataset = dataset - return result - - def delete_dataset( - self, - dataset, - delete_contents=False, - retry=DEFAULT_RETRY, - timeout=None, - not_found_ok=False, - ): - """Delete a dataset. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete - - Args: - dataset (Union[ \ - google.cloud.bigquery.dataset.Dataset, \ - google.cloud.bigquery.dataset.DatasetReference, \ - str, \ - ]): - A reference to the dataset to delete. If a string is passed - in, this method attempts to create a dataset reference from a - string using - :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`. - delete_contents (bool): - (Optional) If True, delete all the tables in the dataset. If - False and the dataset contains tables, the request will fail. - Default is False. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - not_found_ok (bool): - Defaults to ``False``. If ``True``, ignore "not found" errors - when deleting the dataset. - """ - if isinstance(dataset, str): - dataset = DatasetReference.from_string( - dataset, default_project=self.project - ) - - if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError("dataset must be a Dataset or a DatasetReference") - - params = {} - if delete_contents: - params["deleteContents"] = "true" - - try: - self._call_api( - retry, - method="DELETE", - path=dataset.path, - query_params=params, - timeout=timeout, - ) - except google.api_core.exceptions.NotFound: - if not not_found_ok: - raise - - def delete_model( - self, model, retry=DEFAULT_RETRY, timeout=None, not_found_ok=False - ): - """[Beta] Delete a model. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models/delete - - Args: - model (Union[ \ - google.cloud.bigquery.model.Model, \ - google.cloud.bigquery.model.ModelReference, \ - str, \ - ]): - A reference to the model to delete. If a string is passed in, - this method attempts to create a model reference from a - string using - :func:`google.cloud.bigquery.model.ModelReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``.
- not_found_ok (bool): - Defaults to ``False``. If ``True``, ignore "not found" errors - when deleting the model. - """ - if isinstance(model, str): - model = ModelReference.from_string(model, default_project=self.project) - - if not isinstance(model, (Model, ModelReference)): - raise TypeError("model must be a Model or a ModelReference") - - try: - self._call_api(retry, method="DELETE", path=model.path, timeout=timeout) - except google.api_core.exceptions.NotFound: - if not not_found_ok: - raise - - def delete_routine( - self, routine, retry=DEFAULT_RETRY, timeout=None, not_found_ok=False - ): - """[Beta] Delete a routine. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/delete - - Args: - routine (Union[ \ - google.cloud.bigquery.routine.Routine, \ - google.cloud.bigquery.routine.RoutineReference, \ - str, \ - ]): - A reference to the routine to delete. If a string is passed - in, this method attempts to create a routine reference from a - string using - :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - not_found_ok (bool): - Defaults to ``False``. If ``True``, ignore "not found" errors - when deleting the routine. - """ - if isinstance(routine, str): - routine = RoutineReference.from_string( - routine, default_project=self.project - ) - - if not isinstance(routine, (Routine, RoutineReference)): - raise TypeError("routine must be a Routine or a RoutineReference") - - try: - self._call_api(retry, method="DELETE", path=routine.path, timeout=timeout) - except google.api_core.exceptions.NotFound: - if not not_found_ok: - raise - - def delete_table( - self, table, retry=DEFAULT_RETRY, timeout=None, not_found_ok=False - ): - """Delete a table. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - A reference to the table to delete. If a string is passed in, - this method attempts to create a table reference from a - string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - not_found_ok (bool): - Defaults to ``False``. If ``True``, ignore "not found" errors - when deleting the table. - """ - table = _table_arg_to_table_ref(table, default_project=self.project) - if not isinstance(table, TableReference): - raise TypeError("Unable to get TableReference for table '{}'".format(table)) - - try: - self._call_api(retry, method="DELETE", path=table.path, timeout=timeout) - except google.api_core.exceptions.NotFound: - if not not_found_ok: - raise - - def _get_query_results( - self, job_id, retry, project=None, timeout_ms=None, location=None, timeout=None - ): - """Get the query results object for a query job. - - Arguments: - job_id (str): Name of the query job. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - project (str): - (Optional) project ID for the query job (defaults to the - project of the client). - timeout_ms (int): - (Optional) number of milliseconds the API call should - wait for the query to complete before the request times out.
- location (str): Location of the query job. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.query._QueryResults: - A new ``_QueryResults`` instance. - """ - - extra_params = {"maxResults": 0} - - if project is None: - project = self.project - - if timeout_ms is not None: - extra_params["timeoutMs"] = timeout_ms - - if location is None: - location = self.location - - if location is not None: - extra_params["location"] = location - - path = "/projects/{}/queries/{}".format(project, job_id) - - # This call is typically made in a polling loop that checks whether the - # job is complete (from QueryJob.done(), called ultimately from - # QueryJob.result()). So we don't need to poll here. - resource = self._call_api( - retry, method="GET", path=path, query_params=extra_params, timeout=timeout - ) - return _QueryResults.from_api_repr(resource) - - def job_from_resource(self, resource): - """Detect correct job type from resource and instantiate. - - Args: - resource (Dict): one job resource from API response - - Returns: - Union[ \ - google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob \ - ]: - The job instance, constructed via the resource. - """ - config = resource.get("configuration", {}) - if "load" in config: - return job.LoadJob.from_api_repr(resource, self) - elif "copy" in config: - return job.CopyJob.from_api_repr(resource, self) - elif "extract" in config: - return job.ExtractJob.from_api_repr(resource, self) - elif "query" in config: - return job.QueryJob.from_api_repr(resource, self) - return job.UnknownJob.from_api_repr(resource, self) - - def get_job( - self, job_id, project=None, location=None, retry=DEFAULT_RETRY, timeout=None - ): - """Fetch a job for the project associated with this client. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - - Arguments: - job_id (str): Unique job identifier. - - Keyword Arguments: - project (str): - (Optional) ID of the project which owns the job (defaults to - the client's project). - location (str): Location where the job was run. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - Union[ \ - google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob \ - ]: - Job instance, based on the resource returned by the API. - """ - extra_params = {"projection": "full"} - - if project is None: - project = self.project - - if location is None: - location = self.location - - if location is not None: - extra_params["location"] = location - - path = "/projects/{}/jobs/{}".format(project, job_id) - - resource = self._call_api( - retry, method="GET", path=path, query_params=extra_params, timeout=timeout - ) - - return self.job_from_resource(resource) - - def cancel_job( - self, job_id, project=None, location=None, retry=DEFAULT_RETRY, timeout=None - ): - """Attempt to cancel a job from a job ID. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel - - Args: - job_id (str): Unique job identifier. - - Keyword Arguments: - project (str): - (Optional) ID of the project which owns the job (defaults to - the client's project).
- location (str): Location where the job was run. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - Union[ \ - google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob, \ - ]: - Job instance, based on the resource returned by the API. - """ - extra_params = {"projection": "full"} - - if project is None: - project = self.project - - if location is None: - location = self.location - - if location is not None: - extra_params["location"] = location - - path = "/projects/{}/jobs/{}/cancel".format(project, job_id) - - resource = self._call_api( - retry, method="POST", path=path, query_params=extra_params, timeout=timeout - ) - - return self.job_from_resource(resource["job"]) - - def list_jobs( - self, - project=None, - parent_job=None, - max_results=None, - page_token=None, - all_users=None, - state_filter=None, - retry=DEFAULT_RETRY, - timeout=None, - min_creation_time=None, - max_creation_time=None, - ): - """List jobs for the project associated with this client. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list - - Args: - project (Optional[str]): - Project ID to use for retrieving jobs. Defaults - to the client's project. - parent_job (Optional[Union[ \ - google.cloud.bigquery.job._AsyncJob, \ - str, \ - ]]): - If set, retrieve only child jobs of the specified parent. - max_results (Optional[int]): - Maximum number of jobs to return. - page_token (Optional[str]): - Opaque marker for the next "page" of jobs. If not - passed, the API will return the first page of jobs. The token - marks the beginning of the iterator to be returned and the - value of the ``page_token`` can be accessed at - ``next_page_token`` of - :class:`~google.api_core.page_iterator.HTTPIterator`. - all_users (Optional[bool]): - If true, include jobs owned by all users in the project. - Defaults to :data:`False`. - state_filter (Optional[str]): - If set, include only jobs matching the given state. One of: - * ``"done"`` - * ``"pending"`` - * ``"running"`` - retry (Optional[google.api_core.retry.Retry]): - How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - min_creation_time (Optional[datetime.datetime]): - Min value for job creation time. If set, only jobs created - after or at this timestamp are returned. If the datetime has - no time zone, UTC is assumed. - max_creation_time (Optional[datetime.datetime]): - Max value for job creation time. If set, only jobs created - before or at this timestamp are returned. If the datetime has - no time zone, UTC is assumed. - - Returns: - google.api_core.page_iterator.Iterator: - Iterable of job instances.
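For example, listing yesterday's finished jobs (a sketch; per the note above, naive datetimes are treated as UTC)::

    import datetime

    from google.cloud import bigquery

    client = bigquery.Client()
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    for finished_job in client.list_jobs(
        state_filter="done", min_creation_time=yesterday
    ):
        print(finished_job.job_id, finished_job.job_type)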
- """ - if isinstance(parent_job, job._AsyncJob): - parent_job = parent_job.job_id - - extra_params = { - "allUsers": all_users, - "stateFilter": state_filter, - "minCreationTime": _str_or_none( - google.cloud._helpers._millis_from_datetime(min_creation_time) - ), - "maxCreationTime": _str_or_none( - google.cloud._helpers._millis_from_datetime(max_creation_time) - ), - "projection": "full", - "parentJobId": parent_job, - } - - extra_params = { - param: value for param, value in extra_params.items() if value is not None - } - - if project is None: - project = self.project - - path = "/projects/%s/jobs" % (project,) - return page_iterator.HTTPIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path=path, - item_to_value=_item_to_job, - items_key="jobs", - page_token=page_token, - max_results=max_results, - extra_params=extra_params, - ) - - def load_table_from_uri( - self, - source_uris, - destination, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """Starts a job for loading data into a table from CloudStorage. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationload - - Arguments: - source_uris (Union[str, Sequence[str]]): - URIs of data files to be loaded; in format - ``gs:///``. - destination (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - Table into which data is to be loaded. If a string is passed - in, this method attempts to create a table reference from a - string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - - Keyword Arguments: - job_id (str): (Optional) Name of the job. - job_id_prefix (str): - (Optional) the user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of the - destination table. - project (str): - Project ID of the project of where to run the job. Defaults - to the client's project. - job_config (google.cloud.bigquery.job.LoadJobConfig): - (Optional) Extra configuration options for the job. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.job.LoadJob: A new load job. - - Raises: - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` - class. 
- """ - job_id = _make_job_id(job_id, job_id_prefix) - - if project is None: - project = self.project - - if location is None: - location = self.location - - job_ref = job._JobReference(job_id, project=project, location=location) - - if isinstance(source_uris, six.string_types): - source_uris = [source_uris] - - destination = _table_arg_to_table_ref(destination, default_project=self.project) - - if job_config: - job_config = copy.deepcopy(job_config) - _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) - - load_job = job.LoadJob(job_ref, source_uris, destination, self, job_config) - load_job._begin(retry=retry, timeout=timeout) - - return load_job - - def load_table_from_file( - self, - file_obj, - destination, - rewind=False, - size=None, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - ): - """Upload the contents of this table from a file-like object. - - Similar to :meth:`load_table_from_uri`, this method creates, starts and - returns a :class:`~google.cloud.bigquery.job.LoadJob`. - - Arguments: - file_obj (file): A file handle opened in binary mode for reading. - destination (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - Table into which data is to be loaded. If a string is passed - in, this method attempts to create a table reference from a - string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - - Keyword Arguments: - rewind (bool): - If True, seek to the beginning of the file handle before - reading the file. - size (int): - The number of bytes to read from the file handle. If size is - ``None`` or large, resumable upload will be used. Otherwise, - multipart upload will be used. - num_retries (int): Number of upload retries. Defaults to 6. - job_id (str): (Optional) Name of the job. - job_id_prefix (str): - (Optional) the user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of the - destination table. - project (str): - Project ID of the project of where to run the job. Defaults - to the client's project. - job_config (google.cloud.bigquery.job.LoadJobConfig): - (Optional) Extra configuration options for the job. - - Returns: - google.cloud.bigquery.job.LoadJob: A new load job. - - Raises: - ValueError: - If ``size`` is not passed in and can not be determined, or if - the ``file_obj`` can be detected to be a file opened in text - mode. - - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` - class. 
- """ - job_id = _make_job_id(job_id, job_id_prefix) - - if project is None: - project = self.project - - if location is None: - location = self.location - - destination = _table_arg_to_table_ref(destination, default_project=self.project) - job_ref = job._JobReference(job_id, project=project, location=location) - if job_config: - job_config = copy.deepcopy(job_config) - _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) - load_job = job.LoadJob(job_ref, None, destination, self, job_config) - job_resource = load_job.to_api_repr() - - if rewind: - file_obj.seek(0, os.SEEK_SET) - - _check_mode(file_obj) - - try: - if size is None or size >= _MAX_MULTIPART_SIZE: - response = self._do_resumable_upload( - file_obj, job_resource, num_retries - ) - else: - response = self._do_multipart_upload( - file_obj, job_resource, size, num_retries - ) - except resumable_media.InvalidResponse as exc: - raise exceptions.from_http_response(exc.response) - - return self.job_from_resource(response.json()) - - def load_table_from_dataframe( - self, - dataframe, - destination, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - parquet_compression="snappy", - ): - """Upload the contents of a table from a pandas DataFrame. - - Similar to :meth:`load_table_from_uri`, this method creates, starts and - returns a :class:`~google.cloud.bigquery.job.LoadJob`. - - Arguments: - dataframe (pandas.DataFrame): - A :class:`~pandas.DataFrame` containing the data to load. - destination (google.cloud.bigquery.table.TableReference): - The destination table to use for loading the data. If it is an - existing table, the schema of the :class:`~pandas.DataFrame` - must match the schema of the destination table. If the table - does not yet exist, the schema is inferred from the - :class:`~pandas.DataFrame`. - - If a string is passed in, this method attempts to create a - table reference from a string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - - Keyword Arguments: - num_retries (Optional[int]): Number of upload retries. - job_id (Optional[str]): Name of the job. - job_id_prefix (Optional[str]): - The user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of the - destination table. - project (Optional[str]): - Project ID of the project of where to run the job. Defaults - to the client's project. - job_config (Optional[google.cloud.bigquery.job.LoadJobConfig]): - Extra configuration options for the job. - - To override the default pandas data type conversions, supply - a value for - :attr:`~google.cloud.bigquery.job.LoadJobConfig.schema` with - column names matching those of the dataframe. The BigQuery - schema is used to determine the correct data type conversion. - Indexes are not loaded. Requires the :mod:`pyarrow` library. - parquet_compression (str): - [Beta] The compression method to use if intermittently - serializing ``dataframe`` to a parquet file. - - If ``pyarrow`` and job config schema are used, the argument - is directly passed as the ``compression`` argument to the - underlying ``pyarrow.parquet.write_table()`` method (the - default value "snappy" gets converted to uppercase). 
- https://arrow.apache.org/docs/python/generated/pyarrow.parquet.write_table.html#pyarrow-parquet-write-table - - If either ``pyarrow`` or job config schema is missing, the - argument is directly passed as the ``compression`` argument - to the underlying ``DataFrame.to_parquet()`` method. - https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_parquet.html#pandas.DataFrame.to_parquet - - Returns: - google.cloud.bigquery.job.LoadJob: A new load job. - - Raises: - ImportError: - If a usable parquet engine cannot be found. This method - requires :mod:`pyarrow` or :mod:`fastparquet` to be - installed. - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` - class. - """ - job_id = _make_job_id(job_id, job_id_prefix) - - if job_config: - _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) - # Make a copy so that the job config isn't modified in-place. - job_config_properties = copy.deepcopy(job_config._properties) - job_config = job.LoadJobConfig() - job_config._properties = job_config_properties - - else: - job_config = job.LoadJobConfig() - - job_config.source_format = job.SourceFormat.PARQUET - - if location is None: - location = self.location - - # If a table schema is not provided, we try to fetch the existing table - # schema, and check whether the dataframe schema is compatible with it - except - # for WRITE_TRUNCATE jobs, where the existing schema does not matter. - if ( - not job_config.schema - and job_config.write_disposition != job.WriteDisposition.WRITE_TRUNCATE - ): - try: - table = self.get_table(destination) - except google.api_core.exceptions.NotFound: - table = None - else: - columns_and_indexes = frozenset( - name - for name, _ in _pandas_helpers.list_columns_and_indexes(dataframe) - ) - # schema fields not present in the dataframe are not needed - job_config.schema = [ - field for field in table.schema if field.name in columns_and_indexes - ] - - job_config.schema = _pandas_helpers.dataframe_to_bq_schema( - dataframe, job_config.schema - ) - - if not job_config.schema: - # the schema could not be fully detected - warnings.warn( - "Schema could not be detected for all columns. Loading from a " - "dataframe without a schema will be deprecated in the future, " - "please provide a schema.", - PendingDeprecationWarning, - stacklevel=2, - ) - - tmpfd, tmppath = tempfile.mkstemp(suffix="_job_{}.parquet".format(job_id[:8])) - os.close(tmpfd) - - try: - if pyarrow and job_config.schema: - if parquet_compression == "snappy": # adjust the default value - parquet_compression = parquet_compression.upper() - - _pandas_helpers.dataframe_to_parquet( - dataframe, - job_config.schema, - tmppath, - parquet_compression=parquet_compression, - ) - else: - if job_config.schema: - warnings.warn( - "job_config.schema is set, but not used to assist in " - "identifying correct types for data serialization.
" - "Please install the pyarrow package.", - PendingDeprecationWarning, - stacklevel=2, - ) - - dataframe.to_parquet(tmppath, compression=parquet_compression) - - with open(tmppath, "rb") as parquet_file: - return self.load_table_from_file( - parquet_file, - destination, - num_retries=num_retries, - rewind=True, - job_id=job_id, - job_id_prefix=job_id_prefix, - location=location, - project=project, - job_config=job_config, - ) - - finally: - os.remove(tmppath) - - def load_table_from_json( - self, - json_rows, - destination, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - ): - """Upload the contents of a table from a JSON string or dict. - - Args: - json_rows (Iterable[Dict[str, Any]]): - Row data to be inserted. Keys must match the table schema fields - and values must be JSON-compatible representations. - - .. note:: - - If your data is already a newline-delimited JSON string, - it is best to wrap it into a file-like object and pass it - to :meth:`~google.cloud.bigquery.client.Client.load_table_from_file`:: - - import io - from google.cloud import bigquery - - data = u'{"foo": "bar"}' - data_as_file = io.StringIO(data) - - client = bigquery.Client() - client.load_table_from_file(data_as_file, ...) - - destination (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - Table into which data is to be loaded. If a string is passed - in, this method attempts to create a table reference from a - string using - :func:`google.cloud.bigquery.table.TableReference.from_string`. - - Keyword Arguments: - num_retries (Optional[int]): Number of upload retries. - job_id (str): (Optional) Name of the job. - job_id_prefix (str): - (Optional) the user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of the - destination table. - project (str): - Project ID of the project of where to run the job. Defaults - to the client's project. - job_config (google.cloud.bigquery.job.LoadJobConfig): - (Optional) Extra configuration options for the job. The - ``source_format`` setting is always set to - :attr:`~google.cloud.bigquery.job.SourceFormat.NEWLINE_DELIMITED_JSON`. - - Returns: - google.cloud.bigquery.job.LoadJob: A new load job. - - Raises: - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` - class. - """ - job_id = _make_job_id(job_id, job_id_prefix) - - if job_config: - _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) - # Make a copy so that the job config isn't modified in-place. 
- job_config = copy.deepcopy(job_config) - else: - job_config = job.LoadJobConfig() - - job_config.source_format = job.SourceFormat.NEWLINE_DELIMITED_JSON - - if job_config.schema is None: - job_config.autodetect = True - - if project is None: - project = self.project - - if location is None: - location = self.location - - destination = _table_arg_to_table_ref(destination, default_project=self.project) - - data_str = u"\n".join(json.dumps(item) for item in json_rows) - data_file = io.BytesIO(data_str.encode()) - - return self.load_table_from_file( - data_file, - destination, - num_retries=num_retries, - job_id=job_id, - job_id_prefix=job_id_prefix, - location=location, - project=project, - job_config=job_config, - ) - - def _do_resumable_upload(self, stream, metadata, num_retries): - """Perform a resumable upload. - - Args: - stream (IO[bytes]): A bytes IO object open for reading. - - metadata (Dict): The metadata associated with the upload. - - num_retries (int): - Number of upload retries. (Deprecated: This - argument will be removed in a future release.) - - Returns: - requests.Response: - The "200 OK" response object returned after the final chunk - is uploaded. - """ - upload, transport = self._initiate_resumable_upload( - stream, metadata, num_retries - ) - - while not upload.finished: - response = upload.transmit_next_chunk(transport) - - return response - - def _initiate_resumable_upload(self, stream, metadata, num_retries): - """Initiate a resumable upload. - - Args: - stream (IO[bytes]): A bytes IO object open for reading. - - metadata (Dict): The metadata associated with the upload. - - num_retries (int): - Number of upload retries. (Deprecated: This - argument will be removed in a future release.) - - Returns: - Tuple: - Pair of - - * The :class:`~google.resumable_media.requests.ResumableUpload` - that was created - * The ``transport`` used to initiate the upload. - """ - chunk_size = _DEFAULT_CHUNKSIZE - transport = self._http - headers = _get_upload_headers(self._connection.user_agent) - upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project) - # TODO: modify ResumableUpload to take a retry.Retry object - # that it can use for the initial RPC. - upload = ResumableUpload(upload_url, chunk_size, headers=headers) - - if num_retries is not None: - upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries - ) - - upload.initiate( - transport, stream, metadata, _GENERIC_CONTENT_TYPE, stream_final=False - ) - - return upload, transport - - def _do_multipart_upload(self, stream, metadata, size, num_retries): - """Perform a multipart upload. - - Args: - stream (IO[bytes]): A bytes IO object open for reading. - - metadata (Dict): The metadata associated with the upload. - - size (int): - The number of bytes to be uploaded (which will be read - from ``stream``). If not provided, the upload will be - concluded once ``stream`` is exhausted (or :data:`None`). - - num_retries (int): - Number of upload retries. (Deprecated: This - argument will be removed in a future release.) - - Returns: - requests.Response: - The "200 OK" response object returned after the multipart - upload request. - - Raises: - ValueError: - if the ``stream`` has fewer than ``size`` - bytes remaining. 
- """ - data = stream.read(size) - if len(data) < size: - msg = _READ_LESS_THAN_SIZE.format(size, len(data)) - raise ValueError(msg) - - headers = _get_upload_headers(self._connection.user_agent) - - upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project) - upload = MultipartUpload(upload_url, headers=headers) - - if num_retries is not None: - upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries - ) - - response = upload.transmit(self._http, data, metadata, _GENERIC_CONTENT_TYPE) - - return response - - def copy_table( - self, - sources, - destination, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """Copy one or more tables to another table. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationtablecopy - - Args: - sources (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - Sequence[ \ - Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ] \ - ], \ - ]): - Table or tables to be copied. - destination (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - Table into which data is to be copied. - - Keyword Arguments: - job_id (str): (Optional) The ID of the job. - job_id_prefix (str) - (Optional) the user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of any - source table as well as the destination table. - project (str): - Project ID of the project of where to run the job. Defaults - to the client's project. - job_config (google.cloud.bigquery.job.CopyJobConfig): - (Optional) Extra configuration options for the job. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.job.CopyJob: A new copy job instance. - - Raises: - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.CopyJobConfig` - class. - """ - job_id = _make_job_id(job_id, job_id_prefix) - - if project is None: - project = self.project - - if location is None: - location = self.location - - job_ref = job._JobReference(job_id, project=project, location=location) - - # sources can be one of many different input types. (string, Table, - # TableReference, or a sequence of any of those.) Convert them all to a - # list of TableReferences. - # - # _table_arg_to_table_ref leaves lists unmodified. 
- sources = _table_arg_to_table_ref(sources, default_project=self.project) - - if not isinstance(sources, collections_abc.Sequence): - sources = [sources] - - sources = [ - _table_arg_to_table_ref(source, default_project=self.project) - for source in sources - ] - - destination = _table_arg_to_table_ref(destination, default_project=self.project) - - if job_config: - _verify_job_config_type(job_config, google.cloud.bigquery.job.CopyJobConfig) - job_config = copy.deepcopy(job_config) - - copy_job = job.CopyJob( - job_ref, sources, destination, client=self, job_config=job_config - ) - copy_job._begin(retry=retry, timeout=timeout) - - return copy_job - - def extract_table( - self, - source, - destination_uris, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """Start a job to extract a table into Cloud Storage files. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationextract - - Args: - source (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - Table to be extracted. - destination_uris (Union[str, Sequence[str]]): - URIs of Cloud Storage file(s) into which table data is to be - extracted; in format - ``gs://<bucket_name>/<object_name_or_glob>``. - - Keyword Arguments: - job_id (str): (Optional) The ID of the job. - job_id_prefix (str): - (Optional) The user-provided prefix for a randomly generated - job ID. This parameter will be ignored if a ``job_id`` is - also given. - location (str): - Location where to run the job. Must match the location of the - source table. - project (str): - Project ID of the project where the job runs. Defaults - to the client's project. - job_config (google.cloud.bigquery.job.ExtractJobConfig): - (Optional) Extra configuration options for the job. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.job.ExtractJob: A new extract job instance. - - Raises: - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.ExtractJobConfig` - class. - """ - job_id = _make_job_id(job_id, job_id_prefix) - - if project is None: - project = self.project - - if location is None: - location = self.location - - job_ref = job._JobReference(job_id, project=project, location=location) - source = _table_arg_to_table_ref(source, default_project=self.project) - - if isinstance(destination_uris, six.string_types): - destination_uris = [destination_uris] - - if job_config: - _verify_job_config_type( - job_config, google.cloud.bigquery.job.ExtractJobConfig - ) - job_config = copy.deepcopy(job_config) - - extract_job = job.ExtractJob( - job_ref, source, destination_uris, client=self, job_config=job_config - ) - extract_job._begin(retry=retry, timeout=timeout) - - return extract_job - - def query( - self, - query, - job_config=None, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """Run a SQL query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery - - Args: - query (str): - SQL query to be executed. Defaults to the standard SQL - dialect. Use the ``job_config`` parameter to change dialects. 
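For illustration only, a minimal call sketch with a hypothetical
project, dataset, and table::

    query_job = client.query(
        "SELECT name FROM `my-project.my_dataset.people` LIMIT 10"
    )
    for row in query_job.result():  # waits for the job to finish
        print(row.name)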
- - Keyword Arguments: - job_config (google.cloud.bigquery.job.QueryJobConfig): - (Optional) Extra configuration options for the job. - To override any options that were previously set in - the ``default_query_job_config`` given to the - ``Client`` constructor, manually set those options to ``None``, - or whatever value is preferred. - job_id (str): (Optional) ID to use for the query job. - job_id_prefix (str): - (Optional) The prefix to use for a randomly generated job ID. - This parameter will be ignored if a ``job_id`` is also given. - location (str): - Location where to run the job. Must match the location of - any table used in the query as well as the destination table. - project (str): - Project ID of the project where the job runs. Defaults - to the client's project. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - google.cloud.bigquery.job.QueryJob: A new query job instance. - - Raises: - TypeError: - If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.QueryJobConfig` - class. - """ - job_id = _make_job_id(job_id, job_id_prefix) - - if project is None: - project = self.project - - if location is None: - location = self.location - - job_config = copy.deepcopy(job_config) - - if self._default_query_job_config: - if job_config: - _verify_job_config_type( - job_config, google.cloud.bigquery.job.QueryJobConfig - ) - # Anything that is not set on the incoming config but is set on - # the default config is filled in from the default; - # the incoming config therefore has precedence. - job_config = job_config._fill_from_default( - self._default_query_job_config - ) - else: - _verify_job_config_type( - self._default_query_job_config, - google.cloud.bigquery.job.QueryJobConfig, - ) - job_config = copy.deepcopy(self._default_query_job_config) - - job_ref = job._JobReference(job_id, project=project, location=location) - query_job = job.QueryJob(job_ref, query, client=self, job_config=job_config) - query_job._begin(retry=retry, timeout=timeout) - - return query_job - - def insert_rows(self, table, rows, selected_fields=None, **kwargs): - """Insert rows into a table via the streaming API. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - The destination table for the row data, or a reference to it. - rows (Union[Sequence[Tuple], Sequence[Dict]]): - Row data to be inserted. If a list of tuples is given, each - tuple should contain data for each schema field on the - current table and in the same order as the schema fields. If - a list of dictionaries is given, the keys must include all - required fields in the schema. Keys which do not correspond - to a field in the schema are ignored. - selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): - The fields to return. Required if ``table`` is a - :class:`~google.cloud.bigquery.table.TableReference`. - kwargs (Dict): - Keyword arguments to - :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. - - Returns: - Sequence[Mapping]: - One mapping per row with insert errors: the "index" key - identifies the row, and the "errors" key contains a list of - the mappings describing one or more problems with the row. 
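For illustration only (hypothetical table and rows; the table is
fetched first so that its schema is available)::

    table = client.get_table("my-project.my_dataset.people")
    errors = client.insert_rows(table, [("Ada", 36), ("Grace", 42)])
    assert errors == []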
- - Raises: - TypeError: if ``rows`` is not a ``Sequence`` or ``table`` is not a valid table argument. - ValueError: if the table's schema could not be determined. - """ - if not isinstance(rows, (collections_abc.Sequence, collections_abc.Iterator)): - raise TypeError("rows argument should be a sequence of dicts or tuples") - - table = _table_arg_to_table(table, default_project=self.project) - - if not isinstance(table, Table): - raise TypeError(_NEED_TABLE_ARGUMENT) - - schema = table.schema - - # selected_fields can override the table schema. - if selected_fields is not None: - schema = selected_fields - - if len(schema) == 0: - raise ValueError( - ( - "Could not determine schema for table '{}'. Call client.get_table() " - "or pass in a list of schema fields to the selected_fields argument." - ).format(table) - ) - - json_rows = [_record_field_to_json(schema, row) for row in rows] - - return self.insert_rows_json(table, json_rows, **kwargs) - - def insert_rows_from_dataframe( - self, table, dataframe, selected_fields=None, chunk_size=500, **kwargs - ): - """Insert rows into a table from a dataframe via the streaming API. - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - The destination table for the row data, or a reference to it. - dataframe (pandas.DataFrame): - A :class:`~pandas.DataFrame` containing the data to load. - selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): - The fields to return. Required if ``table`` is a - :class:`~google.cloud.bigquery.table.TableReference`. - chunk_size (int): - The number of rows to stream in a single chunk. Must be positive. - kwargs (Dict): - Keyword arguments to - :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. - - Returns: - Sequence[Sequence[Mapping]]: - A list with insert errors for each insert chunk. Each element - is a list containing one mapping per row with insert errors: - the "index" key identifies the row, and the "errors" key - contains a list of the mappings describing one or more problems - with the row. - - Raises: - ValueError: if the table's schema is not set. - """ - insert_results = [] - - # float() guards against integer (floor) division under Python 2. - chunk_count = int(math.ceil(len(dataframe) / float(chunk_size))) - rows_iter = ( - dict(six.moves.zip(dataframe.columns, row)) - for row in dataframe.itertuples(index=False, name=None) - ) - - for _ in range(chunk_count): - rows_chunk = itertools.islice(rows_iter, chunk_size) - result = self.insert_rows(table, rows_chunk, selected_fields, **kwargs) - insert_results.append(result) - - return insert_results - - def insert_rows_json( - self, - table, - json_rows, - row_ids=None, - skip_invalid_rows=None, - ignore_unknown_values=None, - template_suffix=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """Insert rows into a table without applying local type conversions. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - The destination table for the row data, or a reference to it. - json_rows (Sequence[Dict]): - Row data to be inserted. Keys must match the table schema fields - and values must be JSON-compatible representations. - row_ids (Optional[Sequence[Optional[str]]]): - Unique IDs, one per row being inserted. An ID can also be - ``None``, indicating that an explicit insert ID should **not** - be used for that row. If the argument is omitted altogether, - unique IDs are created automatically. 
- skip_invalid_rows (Optional[bool]): - Insert all valid rows of a request, even if invalid rows exist. - The default value is ``False``, which causes the entire request - to fail if any invalid rows exist. - ignore_unknown_values (Optional[bool]): - Accept rows that contain values that do not match the schema. - The unknown values are ignored. Default is ``False``, which - treats unknown values as errors. - template_suffix (Optional[str]): - Treat ``table`` as a template table and provide a suffix. - BigQuery will create a new table ``<table> + <template_suffix>`` - based on the schema of the template table. See - https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables - retry (Optional[google.api_core.retry.Retry]): - How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - Sequence[Mapping]: - One mapping per row with insert errors: the "index" key - identifies the row, and the "errors" key contains a list of - the mappings describing one or more problems with the row. - - Raises: - TypeError: if ``json_rows`` is not a ``Sequence``. - """ - if not isinstance( - json_rows, (collections_abc.Sequence, collections_abc.Iterator) - ): - raise TypeError("json_rows argument should be a sequence of dicts") - # Convert table to just a reference because unlike insert_rows, - # insert_rows_json doesn't need the table schema. It's not doing any - # type conversions. - table = _table_arg_to_table_ref(table, default_project=self.project) - rows_info = [] - data = {"rows": rows_info} - - for index, row in enumerate(json_rows): - info = {"json": row} - if row_ids is not None: - info["insertId"] = row_ids[index] - else: - info["insertId"] = str(uuid.uuid4()) - rows_info.append(info) - - if skip_invalid_rows is not None: - data["skipInvalidRows"] = skip_invalid_rows - - if ignore_unknown_values is not None: - data["ignoreUnknownValues"] = ignore_unknown_values - - if template_suffix is not None: - data["templateSuffix"] = template_suffix - - # We can always retry, because every row has an insert ID. - response = self._call_api( - retry, - method="POST", - path="%s/insertAll" % table.path, - data=data, - timeout=timeout, - ) - errors = [] - - for error in response.get("insertErrors", ()): - errors.append({"index": int(error["index"]), "errors": error["errors"]}) - - return errors - - def list_partitions(self, table, retry=DEFAULT_RETRY, timeout=None): - """List the partitions in a table. - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - The table or reference from which to get partition info. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - If multiple requests are made under the hood, ``timeout`` is - interpreted as the approximate total time of **all** requests. 
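For illustration, a sketch with a hypothetical partitioned table::

    partition_ids = client.list_partitions("my-project.my_dataset.events")
    # e.g. ["20200101", "20200102", ...]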
- - Returns: - List[str]: - A list of the partition ids present in the partitioned table - """ - table = _table_arg_to_table_ref(table, default_project=self.project) - - with TimeoutGuard( - timeout, timeout_error_type=concurrent.futures.TimeoutError - ) as guard: - meta_table = self.get_table( - TableReference( - DatasetReference(table.project, table.dataset_id), - "%s$__PARTITIONS_SUMMARY__" % table.table_id, - ), - retry=retry, - timeout=timeout, - ) - timeout = guard.remaining_timeout - - subset = [col for col in meta_table.schema if col.name == "partition_id"] - return [ - row[0] - for row in self.list_rows( - meta_table, selected_fields=subset, retry=retry, timeout=timeout - ) - ] - - def list_rows( - self, - table, - selected_fields=None, - max_results=None, - page_token=None, - start_index=None, - page_size=None, - retry=DEFAULT_RETRY, - timeout=None, - ): - """List the rows of the table. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list - - .. note:: - - This method assumes that the provided schema is up-to-date with the - schema as defined on the back-end: if the two schemas are not - identical, the values returned may be incomplete. To ensure that the - local copy of the schema is up-to-date, call ``client.get_table``. - - Args: - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableListItem, \ - google.cloud.bigquery.table.TableReference, \ - str, \ - ]): - The table to list, or a reference to it. When the table - object does not contain a schema and ``selected_fields`` is - not supplied, this method calls ``get_table`` to fetch the - table schema. - selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): - The fields to return. If not supplied, data for all columns - are downloaded. - max_results (int): - (Optional) maximum number of rows to return. - page_token (str): - (Optional) Token representing a cursor into the table's rows. - If not passed, the API will return the first page of the - rows. The token marks the beginning of the iterator to be - returned and the value of the ``page_token`` can be accessed - at ``next_page_token`` of the - :class:`~google.cloud.bigquery.table.RowIterator`. - start_index (int): - (Optional) The zero-based index of the starting row to read. - page_size (int): - Optional. The maximum number of rows in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - retry (google.api_core.retry.Retry): - (Optional) How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - If multiple requests are made under the hood, ``timeout`` is - interpreted as the approximate total time of **all** requests. - - Returns: - google.cloud.bigquery.table.RowIterator: - Iterator of row data - :class:`~google.cloud.bigquery.table.Row`-s. During each - page, the iterator will have the ``total_rows`` attribute - set, which counts the total number of rows **in the table** - (this is distinct from the total number of rows in the - current page: ``iterator.page.num_items``). - """ - table = _table_arg_to_table(table, default_project=self.project) - - if not isinstance(table, Table): - raise TypeError(_NEED_TABLE_ARGUMENT) - - schema = table.schema - - # selected_fields can override the table schema. - if selected_fields is not None: - schema = selected_fields - - # No schema, but no selected_fields. 
Assume the developer wants all - # columns, so get the table resource for them rather than failing. - elif len(schema) == 0: - with TimeoutGuard( - timeout, timeout_error_type=concurrent.futures.TimeoutError - ) as guard: - table = self.get_table(table.reference, retry=retry, timeout=timeout) - timeout = guard.remaining_timeout - schema = table.schema - - params = {} - if selected_fields is not None: - params["selectedFields"] = ",".join(field.name for field in selected_fields) - if start_index is not None: - params["startIndex"] = start_index - - row_iterator = RowIterator( - client=self, - api_request=functools.partial(self._call_api, retry, timeout=timeout), - path="%s/data" % (table.path,), - schema=schema, - page_token=page_token, - max_results=max_results, - page_size=page_size, - extra_params=params, - table=table, - # Pass in selected_fields separately from schema so that full - # tables can be fetched without a column filter. - selected_fields=selected_fields, - ) - return row_iterator - - def _schema_from_json_file_object(self, file_obj): - """Helper function for schema_from_json that takes a - file object that describes a table schema. - - Returns: - List of schema field objects. - """ - json_data = json.load(file_obj) - return [SchemaField.from_api_repr(field) for field in json_data] - - def _schema_to_json_file_object(self, schema_list, file_obj): - """Helper function for schema_to_json that takes a schema list and file - object and writes the schema list to the file object with json.dump - """ - json.dump(schema_list, file_obj, indent=2, sort_keys=True) - - def schema_from_json(self, file_or_path): - """Takes a file object or file path that contains json that describes - a table schema. - - Returns: - List of schema field objects. - """ - if isinstance(file_or_path, io.IOBase): - return self._schema_from_json_file_object(file_or_path) - - with open(file_or_path) as file_obj: - return self._schema_from_json_file_object(file_obj) - - def schema_to_json(self, schema_list, destination): - """Takes a list of schema field objects. - - Serializes the list of schema field objects as json to a file. - - Destination is a file path or a file object. - """ - json_schema_list = [f.to_api_repr() for f in schema_list] - - if isinstance(destination, io.IOBase): - return self._schema_to_json_file_object(json_schema_list, destination) - - with open(destination, mode="w") as file_obj: - return self._schema_to_json_file_object(json_schema_list, file_obj) - - -# pylint: disable=unused-argument -def _item_to_project(iterator, resource): - """Convert a JSON project to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - - resource (Dict): An item to be converted to a project. - - Returns: - google.cloud.bigquery.client.Project: The next project in the page. - """ - return Project.from_api_repr(resource) - - -# pylint: enable=unused-argument - - -def _item_to_dataset(iterator, resource): - """Convert a JSON dataset to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - - resource (Dict): An item to be converted to a dataset. - - Returns: - google.cloud.bigquery.dataset.DatasetListItem: The next dataset in the page. - """ - return DatasetListItem(resource) - - -def _item_to_job(iterator, resource): - """Convert a JSON job to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. 
- - resource (Dict): An item to be converted to a job. - - Returns: - job instance: The next job in the page. - """ - return iterator.client.job_from_resource(resource) - - -def _item_to_model(iterator, resource): - """Convert a JSON model to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): - The iterator that is currently in use. - resource (Dict): An item to be converted to a model. - - Returns: - google.cloud.bigquery.model.Model: The next model in the page. - """ - return Model.from_api_repr(resource) - - -def _item_to_routine(iterator, resource): - """Convert a JSON model to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): - The iterator that is currently in use. - resource (Dict): An item to be converted to a routine. - - Returns: - google.cloud.bigquery.routine.Routine: The next routine in the page. - """ - return Routine.from_api_repr(resource) - - -def _item_to_table(iterator, resource): - """Convert a JSON table to the native object. - - Args: - iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - - resource (Dict): An item to be converted to a table. - - Returns: - google.cloud.bigquery.table.Table: The next table in the page. - """ - return TableListItem(resource) - - -def _make_job_id(job_id, prefix=None): - """Construct an ID for a new job. - - Args: - job_id (Optional[str]): the user-provided job ID. - - prefix (Optional[str]): the user-provided prefix for a job ID. - - Returns: - str: A job ID - """ - if job_id is not None: - return job_id - elif prefix is not None: - return str(prefix) + str(uuid.uuid4()) - else: - return str(uuid.uuid4()) - - -def _check_mode(stream): - """Check that a stream was opened in read-binary mode. - - Args: - stream (IO[bytes]): A bytes IO object open for reading. - - Raises: - ValueError: - if the ``stream.mode`` is a valid attribute - and is not among ``rb``, ``r+b`` or ``rb+``. - """ - mode = getattr(stream, "mode", None) - - if isinstance(stream, gzip.GzipFile): - if mode != gzip.READ: - raise ValueError( - "Cannot upload gzip files opened in write mode: use " - "gzip.GzipFile(filename, mode='rb')" - ) - else: - if mode is not None and mode not in ("rb", "r+b", "rb+"): - raise ValueError( - "Cannot upload files opened in text mode: use " - "open(filename, mode='rb') or open(filename, mode='r+b')" - ) - - -def _get_upload_headers(user_agent): - """Get the headers for an upload request. - - Args: - user_agent (str): The user-agent for requests. - - Returns: - Dict: The headers to be used for the request. - """ - return { - "Accept": "application/json", - "Accept-Encoding": "gzip, deflate", - "User-Agent": user_agent, - "content-type": "application/json", - } diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py deleted file mode 100644 index 99c47026fe3a..000000000000 --- a/bigquery/google/cloud/bigquery/dataset.py +++ /dev/null @@ -1,752 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Datasets.""" - -from __future__ import absolute_import - -import six -import copy - -import google.cloud._helpers -from google.cloud.bigquery import _helpers -from google.cloud.bigquery.model import ModelReference -from google.cloud.bigquery.routine import RoutineReference -from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration - - -def _get_table_reference(self, table_id): - """Constructs a TableReference. - - Args: - table_id (str): The ID of the table. - - Returns: - google.cloud.bigquery.table.TableReference: - A table reference for a table in this dataset. - """ - return TableReference(self, table_id) - - -def _get_model_reference(self, model_id): - """Constructs a ModelReference. - - Args: - model_id (str): the ID of the model. - - Returns: - google.cloud.bigquery.model.ModelReference: - A ModelReference for a model in this dataset. - """ - return ModelReference.from_api_repr( - {"projectId": self.project, "datasetId": self.dataset_id, "modelId": model_id} - ) - - -def _get_routine_reference(self, routine_id): - """Constructs a RoutineReference. - - Args: - routine_id (str): the ID of the routine. - - Returns: - google.cloud.bigquery.routine.RoutineReference: - A RoutineReference for a routine in this dataset. - """ - return RoutineReference.from_api_repr( - { - "projectId": self.project, - "datasetId": self.dataset_id, - "routineId": routine_id, - } - ) - - -class AccessEntry(object): - """Represents grant of an access role to an entity. - - An entry must have exactly one of the allowed :attr:`ENTITY_TYPES`. If - anything but ``view`` is set, a ``role`` is also required. ``role`` is - omitted for a ``view``, because ``view`` s are always read-only. - - See https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets. - - Attributes: - role (str): - Role granted to the entity. The following string values are - supported: `'READER'`, `'WRITER'`, `'OWNER'`. It may also be - :data:`None` if the ``entity_type`` is ``view``. - - entity_type (str): - Type of entity being granted the role. One of :attr:`ENTITY_TYPES`. - - entity_id (Union[str, Dict[str, str]]): - If the ``entity_type`` is not 'view', the ``entity_id`` is the - ``str`` ID of the entity being granted the role. If the - ``entity_type`` is 'view', the ``entity_id`` is a ``dict`` - representing the view from a different dataset to grant access to - in the following format:: - - { - 'projectId': string, - 'datasetId': string, - 'tableId': string - } - - Raises: - ValueError: - If the ``entity_type`` is not among :attr:`ENTITY_TYPES`, or if a - ``view`` has ``role`` set, or a non ``view`` **does not** have a - ``role`` set. - - Examples: - >>> entry = AccessEntry('OWNER', 'userByEmail', 'user@example.com') - - >>> view = { - ... 'projectId': 'my-project', - ... 'datasetId': 'my_dataset', - ... 'tableId': 'my_table' - ... 
} - >>> entry = AccessEntry(None, 'view', view) - """ - - ENTITY_TYPES = frozenset( - ["userByEmail", "groupByEmail", "domain", "specialGroup", "view", "iamMember"] - ) - """Allowed entity types.""" - - def __init__(self, role, entity_type, entity_id): - if entity_type not in self.ENTITY_TYPES: - message = "Entity type %r not among: %s" % ( - entity_type, - ", ".join(self.ENTITY_TYPES), - ) - raise ValueError(message) - if entity_type == "view": - if role is not None: - raise ValueError( - "Role must be None for a view. Received " "role: %r" % (role,) - ) - else: - if role is None: - raise ValueError( - "Role must be set for entity " "type %r" % (entity_type,) - ) - - self.role = role - self.entity_type = entity_type - self.entity_id = entity_id - - def __eq__(self, other): - if not isinstance(other, AccessEntry): - return NotImplemented - return ( - self.role == other.role - and self.entity_type == other.entity_type - and self.entity_id == other.entity_id - ) - - def __ne__(self, other): - return not self == other - - def __repr__(self): - return "<AccessEntry: role=%s, %s=%s>" % ( - self.role, - self.entity_type, - self.entity_id, - ) - - def to_api_repr(self): - """Construct the API resource representation of this access entry - - Returns: - Dict[str, object]: Access entry represented as an API resource - """ - resource = {self.entity_type: self.entity_id} - if self.role is not None: - resource["role"] = self.role - return resource - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct an access entry given its API representation - - Args: - resource (Dict[str, object]): - Access entry resource representation returned from the API - - Returns: - google.cloud.bigquery.dataset.AccessEntry: - Access entry parsed from ``resource``. - - Raises: - ValueError: - If the resource contains keys other than ``role`` and a single - entity key. - """ - entry = resource.copy() - role = entry.pop("role", None) - entity_type, entity_id = entry.popitem() - if len(entry) != 0: - raise ValueError("Entry has unexpected keys remaining.", entry) - return cls(role, entity_type, entity_id) - - -class DatasetReference(object): - """DatasetReferences are pointers to datasets. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#datasetreference - - Args: - project (str): The ID of the project. - dataset_id (str): The ID of the dataset. - - Raises: - ValueError: If either argument is not of type ``str``. - """ - - def __init__(self, project, dataset_id): - if not isinstance(project, six.string_types): - raise ValueError("Pass a string for project") - if not isinstance(dataset_id, six.string_types): - raise ValueError("Pass a string for dataset_id") - self._project = project - self._dataset_id = dataset_id - - @property - def project(self): - """str: Project ID of the dataset.""" - return self._project - - @property - def dataset_id(self): - """str: Dataset ID.""" - return self._dataset_id - - @property - def path(self): - """str: URL path for the dataset based on project and dataset ID.""" - return "/projects/%s/datasets/%s" % (self.project, self.dataset_id) - - table = _get_table_reference - - model = _get_model_reference - - routine = _get_routine_reference - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a dataset reference given its API representation - - Args: - resource (Dict[str, str]): - Dataset reference resource representation returned from the API - - Returns: - google.cloud.bigquery.dataset.DatasetReference: - Dataset reference parsed from ``resource``. 
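For illustration, a hypothetical resource::

    ref = DatasetReference.from_api_repr(
        {"projectId": "my-project", "datasetId": "my_dataset"}
    )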
- """ - project = resource["projectId"] - dataset_id = resource["datasetId"] - return cls(project, dataset_id) - - @classmethod - def from_string(cls, dataset_id, default_project=None): - """Construct a dataset reference from dataset ID string. - - Args: - dataset_id (str): - A dataset ID in standard SQL format. If ``default_project`` - is not specified, this must include both the project ID and - the dataset ID, separated by ``.``. - default_project (str): - Optional. The project ID to use when ``dataset_id`` does not - include a project ID. - - Returns: - DatasetReference: - Dataset reference parsed from ``dataset_id``. - - Examples: - >>> DatasetReference.from_string('my-project-id.some_dataset') - DatasetReference('my-project-id', 'some_dataset') - - Raises: - ValueError: - If ``dataset_id`` is not a fully-qualified dataset ID in - standard SQL format. - """ - output_dataset_id = dataset_id - output_project_id = default_project - parts = _helpers._split_id(dataset_id) - - if len(parts) == 1 and not default_project: - raise ValueError( - "When default_project is not set, dataset_id must be a " - "fully-qualified dataset ID in standard SQL format, " - 'e.g., "project.dataset_id" got {}'.format(dataset_id) - ) - elif len(parts) == 2: - output_project_id, output_dataset_id = parts - elif len(parts) > 2: - raise ValueError( - "Too many parts in dataset_id. Expected a fully-qualified " - "dataset ID in standard SQL format. e.g. " - '"project.dataset_id", got {}'.format(dataset_id) - ) - - return cls(output_project_id, output_dataset_id) - - def to_api_repr(self): - """Construct the API resource representation of this dataset reference - - Returns: - Dict[str, str]: dataset reference represented as an API resource - """ - return {"projectId": self._project, "datasetId": self._dataset_id} - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple[str]: The contents of this :class:`.DatasetReference`. - """ - return (self._project, self._dataset_id) - - def __eq__(self, other): - if not isinstance(other, DatasetReference): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self._key()) - - def __repr__(self): - return "DatasetReference{}".format(self._key()) - - -class Dataset(object): - """Datasets are containers for tables. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource-dataset - - Args: - dataset_ref (Union[google.cloud.bigquery.dataset.DatasetReference, str]): - A pointer to a dataset. If ``dataset_ref`` is a string, it must - include both the project ID and the dataset ID, separated by - ``.``. 
- """ - - _PROPERTY_TO_API_FIELD = { - "access_entries": "access", - "created": "creationTime", - "default_partition_expiration_ms": "defaultPartitionExpirationMs", - "default_table_expiration_ms": "defaultTableExpirationMs", - "friendly_name": "friendlyName", - "default_encryption_configuration": "defaultEncryptionConfiguration", - } - - def __init__(self, dataset_ref): - if isinstance(dataset_ref, six.string_types): - dataset_ref = DatasetReference.from_string(dataset_ref) - self._properties = {"datasetReference": dataset_ref.to_api_repr(), "labels": {}} - - @property - def project(self): - """str: Project ID of the project bound to the dataset.""" - return self._properties["datasetReference"]["projectId"] - - @property - def path(self): - """str: URL path for the dataset based on project and dataset ID.""" - return "/projects/%s/datasets/%s" % (self.project, self.dataset_id) - - @property - def access_entries(self): - """List[google.cloud.bigquery.dataset.AccessEntry]: Dataset's access - entries. - - ``role`` augments the entity type and must be present **unless** the - entity type is ``view``. - - Raises: - TypeError: If 'value' is not a sequence - ValueError: - If any item in the sequence is not an - :class:`~google.cloud.bigquery.dataset.AccessEntry`. - """ - entries = self._properties.get("access", []) - return [AccessEntry.from_api_repr(entry) for entry in entries] - - @access_entries.setter - def access_entries(self, value): - if not all(isinstance(field, AccessEntry) for field in value): - raise ValueError("Values must be AccessEntry instances") - entries = [entry.to_api_repr() for entry in value] - self._properties["access"] = entries - - @property - def created(self): - """Union[datetime.datetime, None]: Datetime at which the dataset was - created (:data:`None` until set from the server). - """ - creation_time = self._properties.get("creationTime") - if creation_time is not None: - # creation_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(creation_time) - ) - - @property - def dataset_id(self): - """str: Dataset ID.""" - return self._properties["datasetReference"]["datasetId"] - - @property - def full_dataset_id(self): - """Union[str, None]: ID for the dataset resource (:data:`None` until - set from the server) - - In the format ``project_id:dataset_id``. - """ - return self._properties.get("id") - - @property - def reference(self): - """google.cloud.bigquery.dataset.DatasetReference: A reference to this - dataset. - """ - return DatasetReference(self.project, self.dataset_id) - - @property - def etag(self): - """Union[str, None]: ETag for the dataset resource (:data:`None` until - set from the server). - """ - return self._properties.get("etag") - - @property - def modified(self): - """Union[datetime.datetime, None]: Datetime at which the dataset was - last modified (:data:`None` until set from the server). - """ - modified_time = self._properties.get("lastModifiedTime") - if modified_time is not None: - # modified_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(modified_time) - ) - - @property - def self_link(self): - """Union[str, None]: URL for the dataset resource (:data:`None` until - set from the server). - """ - return self._properties.get("selfLink") - - @property - def default_partition_expiration_ms(self): - """Optional[int]: The default partition expiration for all - partitioned tables in the dataset, in milliseconds. 
- - Once this property is set, all newly-created partitioned tables in - the dataset will have a ``time_partitioning.expiration_ms`` property - set to this value, and changing the value will only affect new - tables, not existing ones. The storage in a partition will have an - expiration time of its partition time plus this value. - - Setting this property overrides the use of - ``default_table_expiration_ms`` for partitioned tables: only one of - ``default_table_expiration_ms`` and - ``default_partition_expiration_ms`` will be used for any new - partitioned table. If you provide an explicit - ``time_partitioning.expiration_ms`` when creating or updating a - partitioned table, that value takes precedence over the default - partition expiration time indicated by this property. - """ - return _helpers._int_or_none( - self._properties.get("defaultPartitionExpirationMs") - ) - - @default_partition_expiration_ms.setter - def default_partition_expiration_ms(self, value): - self._properties["defaultPartitionExpirationMs"] = _helpers._str_or_none(value) - - @property - def default_table_expiration_ms(self): - """Union[int, None]: Default expiration time for tables in the dataset - (defaults to :data:`None`). - - Raises: - ValueError: For invalid value types. - """ - return _helpers._int_or_none(self._properties.get("defaultTableExpirationMs")) - - @default_table_expiration_ms.setter - def default_table_expiration_ms(self, value): - if not isinstance(value, six.integer_types) and value is not None: - raise ValueError("Pass an integer, or None") - self._properties["defaultTableExpirationMs"] = _helpers._str_or_none(value) - - @property - def description(self): - """Optional[str]: Description of the dataset as set by the user - (defaults to :data:`None`). - - Raises: - ValueError: for invalid value types. - """ - return self._properties.get("description") - - @description.setter - def description(self, value): - if not isinstance(value, six.string_types) and value is not None: - raise ValueError("Pass a string, or None") - self._properties["description"] = value - - @property - def friendly_name(self): - """Union[str, None]: Title of the dataset as set by the user - (defaults to :data:`None`). - - Raises: - ValueError: for invalid value types. - """ - return self._properties.get("friendlyName") - - @friendly_name.setter - def friendly_name(self, value): - if not isinstance(value, six.string_types) and value is not None: - raise ValueError("Pass a string, or None") - self._properties["friendlyName"] = value - - @property - def location(self): - """Union[str, None]: Location in which the dataset is hosted as set by - the user (defaults to :data:`None`). - - Raises: - ValueError: for invalid value types. - """ - return self._properties.get("location") - - @location.setter - def location(self, value): - if not isinstance(value, six.string_types) and value is not None: - raise ValueError("Pass a string, or None") - self._properties["location"] = value - - @property - def labels(self): - """Dict[str, str]: Labels for the dataset. - - This method always returns a dict. To change a dataset's labels, - modify the dict, then call - :meth:`google.cloud.bigquery.client.Client.update_dataset`. To delete - a label, set its value to :data:`None` before updating. - - Raises: - ValueError: for invalid value types. 
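For illustration, with an assumed ``client`` instance::

    dataset.labels["color"] = "green"
    dataset = client.update_dataset(dataset, ["labels"])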
- """ - return self._properties.setdefault("labels", {}) - - @labels.setter - def labels(self, value): - if not isinstance(value, dict): - raise ValueError("Pass a dict") - self._properties["labels"] = value - - @property - def default_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for all tables in the dataset. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. - - See `protecting data with Cloud KMS keys - `_ - in the BigQuery documentation. - """ - prop = self._properties.get("defaultEncryptionConfiguration") - if prop: - prop = EncryptionConfiguration.from_api_repr(prop) - return prop - - @default_encryption_configuration.setter - def default_encryption_configuration(self, value): - api_repr = value - if value: - api_repr = value.to_api_repr() - self._properties["defaultEncryptionConfiguration"] = api_repr - - @classmethod - def from_string(cls, full_dataset_id): - """Construct a dataset from fully-qualified dataset ID. - - Args: - full_dataset_id (str): - A fully-qualified dataset ID in standard SQL format. Must - include both the project ID and the dataset ID, separated by - ``.``. - - Returns: - Dataset: Dataset parsed from ``full_dataset_id``. - - Examples: - >>> Dataset.from_string('my-project-id.some_dataset') - Dataset(DatasetReference('my-project-id', 'some_dataset')) - - Raises: - ValueError: - If ``full_dataset_id`` is not a fully-qualified dataset ID in - standard SQL format. - """ - return cls(DatasetReference.from_string(full_dataset_id)) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a dataset given its API representation - - Args: - resource (Dict[str: object]): - Dataset resource representation returned from the API - - Returns: - google.cloud.bigquery.dataset.Dataset: - Dataset parsed from ``resource``. - """ - if ( - "datasetReference" not in resource - or "datasetId" not in resource["datasetReference"] - ): - raise KeyError( - "Resource lacks required identity information:" - '["datasetReference"]["datasetId"]' - ) - project_id = resource["datasetReference"]["projectId"] - dataset_id = resource["datasetReference"]["datasetId"] - dataset = cls(DatasetReference(project_id, dataset_id)) - dataset._properties = copy.deepcopy(resource) - return dataset - - def to_api_repr(self): - """Construct the API resource representation of this dataset - - Returns: - Dict[str, object]: The dataset represented as an API resource - """ - return copy.deepcopy(self._properties) - - def _build_resource(self, filter_fields): - """Generate a resource for ``update``.""" - return _helpers._build_resource_from_properties(self, filter_fields) - - table = _get_table_reference - - model = _get_model_reference - - routine = _get_routine_reference - - def __repr__(self): - return "Dataset({})".format(repr(self.reference)) - - -class DatasetListItem(object): - """A read-only dataset resource from a list operation. - - For performance reasons, the BigQuery API only includes some of the - dataset properties when listing datasets. Notably, - :attr:`~google.cloud.bigquery.dataset.Dataset.access_entries` is missing. - - For a full list of the properties that the BigQuery API returns, see the - `REST documentation for datasets.list - `_. - - - Args: - resource (Dict[str, str]): - A dataset-like resource object from a dataset list response. A - ``datasetReference`` property is required. 
- - Raises: - ValueError: - If ``datasetReference`` or one of its required members is missing - from ``resource``. - """ - - def __init__(self, resource): - if "datasetReference" not in resource: - raise ValueError("resource must contain a datasetReference value") - if "projectId" not in resource["datasetReference"]: - raise ValueError( - "resource['datasetReference'] must contain a projectId value" - ) - if "datasetId" not in resource["datasetReference"]: - raise ValueError( - "resource['datasetReference'] must contain a datasetId value" - ) - self._properties = resource - - @property - def project(self): - """str: Project bound to the dataset.""" - return self._properties["datasetReference"]["projectId"] - - @property - def dataset_id(self): - """str: Dataset ID.""" - return self._properties["datasetReference"]["datasetId"] - - @property - def full_dataset_id(self): - """Union[str, None]: ID for the dataset resource (:data:`None` until - set from the server) - - In the format ``project_id:dataset_id``. - """ - return self._properties.get("id") - - @property - def friendly_name(self): - """Union[str, None]: Title of the dataset as set by the user - (defaults to :data:`None`). - """ - return self._properties.get("friendlyName") - - @property - def labels(self): - """Dict[str, str]: Labels for the dataset.""" - return self._properties.setdefault("labels", {}) - - @property - def reference(self): - """google.cloud.bigquery.dataset.DatasetReference: A reference to this - dataset. - """ - return DatasetReference(self.project, self.dataset_id) - - table = _get_table_reference - - model = _get_model_reference - - routine = _get_routine_reference diff --git a/bigquery/google/cloud/bigquery/dbapi/__init__.py b/bigquery/google/cloud/bigquery/dbapi/__init__.py deleted file mode 100644 index d1a723949b10..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/__init__.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google BigQuery implementation of the Database API Specification v2.0. - -This module implements the `Python Database API Specification v2.0 (DB-API)`_ -for Google BigQuery. - -.. 
_Python Database API Specification v2.0 (DB-API): - https://www.python.org/dev/peps/pep-0249/ -""" - -from google.cloud.bigquery.dbapi.connection import connect -from google.cloud.bigquery.dbapi.connection import Connection -from google.cloud.bigquery.dbapi.cursor import Cursor -from google.cloud.bigquery.dbapi.exceptions import Warning -from google.cloud.bigquery.dbapi.exceptions import Error -from google.cloud.bigquery.dbapi.exceptions import InterfaceError -from google.cloud.bigquery.dbapi.exceptions import DatabaseError -from google.cloud.bigquery.dbapi.exceptions import DataError -from google.cloud.bigquery.dbapi.exceptions import OperationalError -from google.cloud.bigquery.dbapi.exceptions import IntegrityError -from google.cloud.bigquery.dbapi.exceptions import InternalError -from google.cloud.bigquery.dbapi.exceptions import ProgrammingError -from google.cloud.bigquery.dbapi.exceptions import NotSupportedError -from google.cloud.bigquery.dbapi.types import Binary -from google.cloud.bigquery.dbapi.types import Date -from google.cloud.bigquery.dbapi.types import DateFromTicks -from google.cloud.bigquery.dbapi.types import Time -from google.cloud.bigquery.dbapi.types import TimeFromTicks -from google.cloud.bigquery.dbapi.types import Timestamp -from google.cloud.bigquery.dbapi.types import TimestampFromTicks -from google.cloud.bigquery.dbapi.types import BINARY -from google.cloud.bigquery.dbapi.types import DATETIME -from google.cloud.bigquery.dbapi.types import NUMBER -from google.cloud.bigquery.dbapi.types import ROWID -from google.cloud.bigquery.dbapi.types import STRING - - -apilevel = "2.0" - -# Threads may share the module and connections, but not cursors. -threadsafety = 2 - -paramstyle = "pyformat" - -__all__ = [ - "apilevel", - "threadsafety", - "paramstyle", - "connect", - "Connection", - "Cursor", - "Warning", - "Error", - "InterfaceError", - "DatabaseError", - "DataError", - "OperationalError", - "IntegrityError", - "InternalError", - "ProgrammingError", - "NotSupportedError", - "Binary", - "Date", - "DateFromTicks", - "Time", - "TimeFromTicks", - "Timestamp", - "TimestampFromTicks", - "BINARY", - "DATETIME", - "NUMBER", - "ROWID", - "STRING", -] diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py deleted file mode 100644 index 651880feac90..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - from collections import abc as collections_abc -except ImportError: # Python 2.7 - import collections as collections_abc - -import datetime -import decimal -import numbers - -import six - -from google.cloud import bigquery -from google.cloud.bigquery.dbapi import exceptions - - -def scalar_to_query_parameter(value, name=None): - """Convert a scalar value into a query parameter. - - Args: - value (Any): - A scalar value to convert into a query parameter. 
- - name (str): - (Optional) Name of the query parameter. - - Returns: - google.cloud.bigquery.ScalarQueryParameter: - A query parameter corresponding with the type and value of the plain - Python object. - - Raises: - google.cloud.bigquery.dbapi.exceptions.ProgrammingError: - if the type cannot be determined. - """ - parameter_type = bigquery_scalar_type(value) - - if parameter_type is None: - raise exceptions.ProgrammingError( - "encountered parameter {} with value {} of unexpected type".format( - name, value - ) - ) - return bigquery.ScalarQueryParameter(name, parameter_type, value) - - -def array_to_query_parameter(value, name=None): - """Convert an array-like value into a query parameter. - - Args: - value (Sequence[Any]): The elements of the array (should not be a - string-like Sequence). - name (Optional[str]): Name of the query parameter. - - Returns: - A query parameter corresponding with the type and value of the plain - Python object. - - Raises: - google.cloud.bigquery.dbapi.exceptions.ProgrammingError: - if the type of array elements cannot be determined. - """ - if not array_like(value): - raise exceptions.ProgrammingError( - "The value of parameter {} must be a sequence that is " - "not string-like.".format(name) - ) - - if not value: - raise exceptions.ProgrammingError( - "Encountered an empty array-like value of parameter {}, cannot " - "determine array elements type.".format(name) - ) - - # Assume that all elements are of the same type, and let the backend handle - # any type incompatibilities among the array elements - array_type = bigquery_scalar_type(value[0]) - if array_type is None: - raise exceptions.ProgrammingError( - "Encountered unexpected first array element of parameter {}, " - "cannot determine array elements type.".format(name) - ) - - return bigquery.ArrayQueryParameter(name, array_type, value) - - -def to_query_parameters_list(parameters): - """Converts a sequence of parameter values into query parameters. - - Args: - parameters (Sequence[Any]): Sequence of query parameter values. - - Returns: - List[google.cloud.bigquery.query._AbstractQueryParameter]: - A list of query parameters. - """ - result = [] - - for value in parameters: - if isinstance(value, collections_abc.Mapping): - raise NotImplementedError("STRUCT-like parameter values are not supported.") - elif array_like(value): - param = array_to_query_parameter(value) - else: - param = scalar_to_query_parameter(value) - result.append(param) - - return result - - -def to_query_parameters_dict(parameters): - """Converts a dictionary of parameter values into query parameters. - - Args: - parameters (Mapping[str, Any]): Dictionary of query parameter values. - - Returns: - List[google.cloud.bigquery.query._AbstractQueryParameter]: - A list of named query parameters. - """ - result = [] - - for name, value in six.iteritems(parameters): - if isinstance(value, collections_abc.Mapping): - raise NotImplementedError( - "STRUCT-like parameter values are not supported " - "(parameter {}).".format(name) - ) - elif array_like(value): - param = array_to_query_parameter(value, name=name) - else: - param = scalar_to_query_parameter(value, name=name) - result.append(param) - - return result - - -def to_query_parameters(parameters): - """Converts DB-API parameter values into query parameters. - - Args: - parameters (Union[Mapping[str, Any], Sequence[Any]]): - A dictionary or sequence of query parameter values. - - Returns: - List[google.cloud.bigquery.query._AbstractQueryParameter]: - A list of query parameters. 
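For illustration (hypothetical values)::

    to_query_parameters([36, "Ada", 1.5])  # positional parameters
    to_query_parameters({"age": 36, "tags": ["x", "y"]})  # named parameters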
- """ - if parameters is None: - return [] - - if isinstance(parameters, collections_abc.Mapping): - return to_query_parameters_dict(parameters) - - return to_query_parameters_list(parameters) - - -def bigquery_scalar_type(value): - """Return a BigQuery name of the scalar type that matches the given value. - - If the scalar type name could not be determined (e.g. for non-scalar - values), ``None`` is returned. - - Args: - value (Any) - - Returns: - Optional[str]: The BigQuery scalar type name. - """ - if isinstance(value, bool): - return "BOOL" - elif isinstance(value, numbers.Integral): - return "INT64" - elif isinstance(value, numbers.Real): - return "FLOAT64" - elif isinstance(value, decimal.Decimal): - return "NUMERIC" - elif isinstance(value, six.text_type): - return "STRING" - elif isinstance(value, six.binary_type): - return "BYTES" - elif isinstance(value, datetime.datetime): - return "DATETIME" if value.tzinfo is None else "TIMESTAMP" - elif isinstance(value, datetime.date): - return "DATE" - elif isinstance(value, datetime.time): - return "TIME" - - return None - - -def array_like(value): - """Determine if the given value is array-like. - - Examples of array-like values (as interpreted by this function) are - sequences such as ``list`` and ``tuple``, but not strings and other - iterables such as sets. - - Args: - value (Any) - - Returns: - bool: ``True`` if the value is considered array-like, ``False`` otherwise. - """ - return isinstance(value, collections_abc.Sequence) and not isinstance( - value, (six.text_type, six.binary_type, bytearray) - ) diff --git a/bigquery/google/cloud/bigquery/dbapi/connection.py b/bigquery/google/cloud/bigquery/dbapi/connection.py deleted file mode 100644 index ee7d0dc3cc59..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/connection.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Connection for the Google BigQuery DB-API.""" - -from google.cloud import bigquery -from google.cloud.bigquery.dbapi import cursor - - -class Connection(object): - """DB-API Connection to Google BigQuery. - - Args: - client (google.cloud.bigquery.Client): A client used to connect to BigQuery. - """ - - def __init__(self, client): - self._client = client - - def close(self): - """No-op.""" - - def commit(self): - """No-op.""" - - def cursor(self): - """Return a new cursor object. - - Returns: - google.cloud.bigquery.dbapi.Cursor: A DB-API cursor that uses this connection. - """ - return cursor.Cursor(self) - - -def connect(client=None): - """Construct a DB-API connection to Google BigQuery. - - Args: - client (google.cloud.bigquery.Client): - (Optional) A client used to connect to BigQuery. If not passed, a - client is created using default options inferred from the environment. - - Returns: - google.cloud.bigquery.dbapi.Connection: A new DB-API connection to BigQuery. 
- """ - if client is None: - client = bigquery.Client() - return Connection(client) diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py deleted file mode 100644 index a3e6ea5be87e..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ /dev/null @@ -1,374 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Cursor for the Google BigQuery DB-API.""" - -import collections - -try: - from collections import abc as collections_abc -except ImportError: # Python 2.7 - import collections as collections_abc - -import six - -from google.cloud.bigquery import job -from google.cloud.bigquery.dbapi import _helpers -from google.cloud.bigquery.dbapi import exceptions -import google.cloud.exceptions - -# Per PEP 249: A 7-item sequence containing information describing one result -# column. The first two items (name and type_code) are mandatory, the other -# five are optional and are set to None if no meaningful values can be -# provided. -Column = collections.namedtuple( - "Column", - [ - "name", - "type_code", - "display_size", - "internal_size", - "precision", - "scale", - "null_ok", - ], -) - - -class Cursor(object): - """DB-API Cursor to Google BigQuery. - - Args: - connection (google.cloud.bigquery.dbapi.Connection): - A DB-API connection to Google BigQuery. - """ - - def __init__(self, connection): - self.connection = connection - self.description = None - # Per PEP 249: The attribute is -1 in case no .execute*() has been - # performed on the cursor or the rowcount of the last operation - # cannot be determined by the interface. - self.rowcount = -1 - # Per PEP 249: The arraysize attribute defaults to 1, meaning to fetch - # a single row at a time. However, we deviate from that, and set the - # default to None, allowing the backend to automatically determine the - # most appropriate size. - self.arraysize = None - self._query_data = None - self._query_job = None - - def close(self): - """No-op.""" - - def _set_description(self, schema): - """Set description from schema. - - Args: - schema (Sequence[google.cloud.bigquery.schema.SchemaField]): - A description of fields in the schema. - """ - if schema is None: - self.description = None - return - - self.description = tuple( - [ - Column( - name=field.name, - type_code=field.field_type, - display_size=None, - internal_size=None, - precision=None, - scale=None, - null_ok=field.is_nullable, - ) - for field in schema - ] - ) - - def _set_rowcount(self, query_results): - """Set the rowcount from query results. - - Normally, this sets rowcount to the number of rows returned by the - query, but if it was a DML statement, it sets rowcount to the number - of modified rows. - - Args: - query_results (google.cloud.bigquery.query._QueryResults): - Results of a query. 
- """ - total_rows = 0 - num_dml_affected_rows = query_results.num_dml_affected_rows - - if query_results.total_rows is not None and query_results.total_rows > 0: - total_rows = query_results.total_rows - if num_dml_affected_rows is not None and num_dml_affected_rows > 0: - total_rows = num_dml_affected_rows - self.rowcount = total_rows - - def execute(self, operation, parameters=None, job_id=None, job_config=None): - """Prepare and execute a database operation. - - .. note:: - When setting query parameters, values which are "text" - (``unicode`` in Python2, ``str`` in Python3) will use - the 'STRING' BigQuery type. Values which are "bytes" (``str`` in - Python2, ``bytes`` in Python3), will use using the 'BYTES' type. - - A `~datetime.datetime` parameter without timezone information uses - the 'DATETIME' BigQuery type (example: Global Pi Day Celebration - March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with - timezone information uses the 'TIMESTAMP' BigQuery type (example: - a wedding on April 29, 2011 at 11am, British Summer Time). - - For more information about BigQuery data types, see: - https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types - - ``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not - yet supported. See: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524 - - Args: - operation (str): A Google BigQuery query string. - - parameters (Union[Mapping[str, Any], Sequence[Any]]): - (Optional) dictionary or sequence of parameter values. - - job_id (str): - (Optional) The job_id to use. If not set, a job ID - is generated at random. - - job_config (google.cloud.bigquery.job.QueryJobConfig): - (Optional) Extra configuration options for the query job. - """ - self._query_data = None - self._query_job = None - client = self.connection._client - - # The DB-API uses the pyformat formatting, since the way BigQuery does - # query parameters was not one of the standard options. Convert both - # the query and the parameters to the format expected by the client - # libraries. - formatted_operation = _format_operation(operation, parameters=parameters) - query_parameters = _helpers.to_query_parameters(parameters) - - config = job_config or job.QueryJobConfig(use_legacy_sql=False) - config.query_parameters = query_parameters - self._query_job = client.query( - formatted_operation, job_config=config, job_id=job_id - ) - - # Wait for the query to finish. - try: - self._query_job.result() - except google.cloud.exceptions.GoogleCloudError as exc: - raise exceptions.DatabaseError(exc) - - query_results = self._query_job._query_results - self._set_rowcount(query_results) - self._set_description(query_results.schema) - - def executemany(self, operation, seq_of_parameters): - """Prepare and execute a database operation multiple times. - - Args: - operation (str): A Google BigQuery query string. - - seq_of_parameters (Union[Sequence[Mapping[str, Any], Sequence[Any]]]): - Sequence of many sets of parameter values. - """ - for parameters in seq_of_parameters: - self.execute(operation, parameters) - - def _try_fetch(self, size=None): - """Try to start fetching data, if not yet started. - - Mutates self to indicate that iteration has started. - """ - if self._query_job is None: - raise exceptions.InterfaceError( - "No query results: execute() must be called before fetch." 
- ) - - is_dml = ( - self._query_job.statement_type - and self._query_job.statement_type.upper() != "SELECT" - ) - if is_dml: - self._query_data = iter([]) - return - - if self._query_data is None: - client = self.connection._client - rows_iter = client.list_rows( - self._query_job.destination, - selected_fields=self._query_job._query_results.schema, - page_size=self.arraysize, - ) - self._query_data = iter(rows_iter) - - def fetchone(self): - """Fetch a single row from the results of the last ``execute*()`` call. - - Returns: - Tuple: - A tuple representing a row or ``None`` if no more data is - available. - - Raises: - google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. - """ - self._try_fetch() - try: - return six.next(self._query_data) - except StopIteration: - return None - - def fetchmany(self, size=None): - """Fetch multiple results from the last ``execute*()`` call. - - .. note:: - The size parameter is not used for the request/response size. - Set the ``arraysize`` attribute before calling ``execute()`` to - set the batch size. - - Args: - size (int): - (Optional) Maximum number of rows to return. Defaults to the - ``arraysize`` property value. If ``arraysize`` is not set, it - defaults to ``1``. - - Returns: - List[Tuple]: A list of rows. - - Raises: - google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. - """ - if size is None: - # Since self.arraysize can be None (a deviation from PEP 249), - # use an actual PEP 249 default of 1 in such case (*some* number - # is needed here). - size = self.arraysize if self.arraysize else 1 - - self._try_fetch(size=size) - rows = [] - - for row in self._query_data: - rows.append(row) - if len(rows) >= size: - break - - return rows - - def fetchall(self): - """Fetch all remaining results from the last ``execute*()`` call. - - Returns: - List[Tuple]: A list of all the rows in the results. - - Raises: - google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. - """ - self._try_fetch() - return list(self._query_data) - - def setinputsizes(self, sizes): - """No-op.""" - - def setoutputsize(self, size, column=None): - """No-op.""" - - -def _format_operation_list(operation, parameters): - """Formats parameters in operation in the way BigQuery expects. - - The input operation will be a query like ``SELECT %s`` and the output - will be a query like ``SELECT ?``. - - Args: - operation (str): A Google BigQuery query string. - - parameters (Sequence[Any]): Sequence of parameter values. - - Returns: - str: A formatted query string. - - Raises: - google.cloud.bigquery.dbapi.ProgrammingError: - if a parameter used in the operation is not found in the - ``parameters`` argument. - """ - formatted_params = ["?" for _ in parameters] - - try: - return operation % tuple(formatted_params) - except TypeError as exc: - raise exceptions.ProgrammingError(exc) - - -def _format_operation_dict(operation, parameters): - """Formats parameters in operation in the way BigQuery expects. - - The input operation will be a query like ``SELECT %(namedparam)s`` and - the output will be a query like ``SELECT @namedparam``. - - Args: - operation (str): A Google BigQuery query string. - - parameters (Mapping[str, Any]): Dictionary of parameter values. - - Returns: - str: A formatted query string. - - Raises: - google.cloud.bigquery.dbapi.ProgrammingError: - if a parameter used in the operation is not found in the - ``parameters`` argument. 
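Because arraysize defaults to None here (a documented deviation from PEP 249) and is forwarded to list_rows() as the page size, batched fetching is worth showing explicitly; a sketch with a hypothetical table name:

cursor.arraysize = 1000  # forwarded to list_rows() as the page size
cursor.execute("SELECT name FROM `my-project.my_dataset.my_table`")  # hypothetical
while True:
    batch = cursor.fetchmany(500)
    if not batch:
        break
    for row in batch:
        print(row)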
- """ - formatted_params = {} - for name in parameters: - escaped_name = name.replace("`", r"\`") - formatted_params[name] = "@`{}`".format(escaped_name) - - try: - return operation % formatted_params - except KeyError as exc: - raise exceptions.ProgrammingError(exc) - - -def _format_operation(operation, parameters=None): - """Formats parameters in operation in way BigQuery expects. - - Args: - operation (str): A Google BigQuery query string. - - parameters (Union[Mapping[str, Any], Sequence[Any]]): - Optional parameter values. - - Returns: - str: A formatted query string. - - Raises: - google.cloud.bigquery.dbapi.ProgrammingError: - if a parameter used in the operation is not found in the - ``parameters`` argument. - """ - if parameters is None: - return operation - - if isinstance(parameters, collections_abc.Mapping): - return _format_operation_dict(operation, parameters) - - return _format_operation_list(operation, parameters) diff --git a/bigquery/google/cloud/bigquery/dbapi/exceptions.py b/bigquery/google/cloud/bigquery/dbapi/exceptions.py deleted file mode 100644 index 37f7129d0618..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/exceptions.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions used in the Google BigQuery DB-API.""" - - -class Warning(Exception): - """Exception raised for important DB-API warnings.""" - - -class Error(Exception): - """Exception representing all non-warning DB-API errors.""" - - -class InterfaceError(Error): - """DB-API error related to the database interface.""" - - -class DatabaseError(Error): - """DB-API error related to the database.""" - - -class DataError(DatabaseError): - """DB-API error due to problems with the processed data.""" - - -class OperationalError(DatabaseError): - """DB-API error related to the database operation. - - These errors are not necessarily under the control of the programmer. - """ - - -class IntegrityError(DatabaseError): - """DB-API error when integrity of the database is affected.""" - - -class InternalError(DatabaseError): - """DB-API error when the database encounters an internal error.""" - - -class ProgrammingError(DatabaseError): - """DB-API exception raised for programming errors.""" - - -class NotSupportedError(DatabaseError): - """DB-API error for operations not supported by the database or API.""" diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py deleted file mode 100644 index 14917820cd38..000000000000 --- a/bigquery/google/cloud/bigquery/dbapi/types.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Types used in the Google BigQuery DB-API.
-
-See `PEP-249`_ for details.
-
-.. _PEP-249:
-    https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
-"""
-
-import datetime
-
-
-Date = datetime.date
-Time = datetime.time
-Timestamp = datetime.datetime
-DateFromTicks = datetime.date.fromtimestamp
-TimestampFromTicks = datetime.datetime.fromtimestamp
-
-
-def Binary(string):
-    """Construct a DB-API binary value.
-
-    Args:
-        string (str): A string to encode as a binary value.
-
-    Returns:
-        bytes: The UTF-8 encoded bytes representing the string.
-    """
-    return string.encode("utf-8")
-
-
-def TimeFromTicks(ticks, tz=None):
-    """Construct a DB-API time value from the given ticks value.
-
-    Args:
-        ticks (float):
-            a number of seconds since the epoch; see the documentation of the
-            standard Python time module for details.
-
-        tz (datetime.tzinfo): (Optional) time zone to use for conversion
-
-    Returns:
-        datetime.time: time represented by ticks.
-    """
-    dt = datetime.datetime.fromtimestamp(ticks, tz=tz)
-    return dt.timetz()
-
-
-class _DBAPITypeObject(object):
-    """DB-API type object which compares equal to many different strings.
-
-    See `PEP-249`_ for details.
-
-    .. _PEP-249:
-        https://www.python.org/dev/peps/pep-0249/#implementation-hints-for-module-authors
-    """
-
-    def __init__(self, *values):
-        self.values = values
-
-    def __eq__(self, other):
-        return other in self.values
-
-
-STRING = "STRING"
-BINARY = _DBAPITypeObject("BYTES", "RECORD", "STRUCT")
-NUMBER = _DBAPITypeObject(
-    "INTEGER", "INT64", "FLOAT", "FLOAT64", "NUMERIC", "BOOLEAN", "BOOL"
-)
-DATETIME = _DBAPITypeObject("TIMESTAMP", "DATE", "TIME", "DATETIME")
-ROWID = "ROWID"
diff --git a/bigquery/google/cloud/bigquery/encryption_configuration.py b/bigquery/google/cloud/bigquery/encryption_configuration.py
deleted file mode 100644
index ba04ae2c45a7..000000000000
--- a/bigquery/google/cloud/bigquery/encryption_configuration.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Define class for the custom encryption configuration."""
-
-import copy
-
-
-class EncryptionConfiguration(object):
-    """Custom encryption configuration (e.g., Cloud KMS keys).
-
-    Args:
-        kms_key_name (str): resource ID of Cloud KMS key used for encryption
-    """
-
-    def __init__(self, kms_key_name=None):
-        self._properties = {}
-        if kms_key_name is not None:
-            self._properties["kmsKeyName"] = kms_key_name
-
-    @property
-    def kms_key_name(self):
-        """str: Resource ID of Cloud KMS key
-
-        Resource ID of Cloud KMS key or :data:`None` if using default
-        encryption.
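The _DBAPITypeObject trick above is the standard PEP 249 way to make one sentinel compare equal to several backend type names; for example:

from google.cloud.bigquery.dbapi import types

types.NUMBER == "INT64"        # True
types.NUMBER == "NUMERIC"      # True
types.DATETIME == "TIMESTAMP"  # True
types.BINARY == "STRING"       # False
types.Binary(u"abc")           # b'abc'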
- """ - return self._properties.get("kmsKeyName") - - @kms_key_name.setter - def kms_key_name(self, value): - self._properties["kmsKeyName"] = value - - @classmethod - def from_api_repr(cls, resource): - """Construct an encryption configuration from its API representation - - Args: - resource (Dict[str, object]): - An encryption configuration representation as returned from - the API. - - Returns: - google.cloud.bigquery.table.EncryptionConfiguration: - An encryption configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - def to_api_repr(self): - """Construct the API resource representation of this encryption - configuration. - - Returns: - Dict[str, object]: - Encryption configuration as represented as an API resource - """ - return copy.deepcopy(self._properties) - - def __eq__(self, other): - if not isinstance(other, EncryptionConfiguration): - return NotImplemented - return self.kms_key_name == other.kms_key_name - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self.kms_key_name) - - def __repr__(self): - return "EncryptionConfiguration({})".format(self.kms_key_name) diff --git a/bigquery/google/cloud/bigquery/enums.py b/bigquery/google/cloud/bigquery/enums.py deleted file mode 100644 index 29fe543f6505..000000000000 --- a/bigquery/google/cloud/bigquery/enums.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import re - -import enum -import six - -from google.cloud.bigquery_v2.gapic import enums as gapic_enums - - -_SQL_SCALAR_TYPES = frozenset( - ( - "INT64", - "BOOL", - "FLOAT64", - "STRING", - "BYTES", - "TIMESTAMP", - "DATE", - "TIME", - "DATETIME", - "GEOGRAPHY", - "NUMERIC", - ) -) - -_SQL_NONSCALAR_TYPES = frozenset(("TYPE_KIND_UNSPECIFIED", "ARRAY", "STRUCT")) - - -def _make_sql_scalars_enum(): - """Create an enum based on a gapic enum containing only SQL scalar types.""" - - new_enum = enum.Enum( - "StandardSqlDataTypes", - ( - (member.name, member.value) - for member in gapic_enums.StandardSqlDataType.TypeKind - if member.name in _SQL_SCALAR_TYPES - ), - ) - - # make sure the docstring for the new enum is also correct - orig_doc = gapic_enums.StandardSqlDataType.TypeKind.__doc__ - skip_pattern = re.compile( - "|".join(_SQL_NONSCALAR_TYPES) - + "|because a JSON object" # the second description line of STRUCT member - ) - - new_doc = "\n".join( - six.moves.filterfalse(skip_pattern.search, orig_doc.splitlines()) - ) - new_enum.__doc__ = "An Enum of scalar SQL types.\n" + new_doc - - return new_enum - - -StandardSqlDataTypes = _make_sql_scalars_enum() - - -# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types -# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types -class SqlTypeNames(str, enum.Enum): - """Enum of allowed SQL type names in schema.SchemaField.""" - - STRING = "STRING" - BYTES = "BYTES" - INTEGER = "INTEGER" - INT64 = "INTEGER" - FLOAT = "FLOAT" - FLOAT64 = "FLOAT" - NUMERIC = "NUMERIC" - BOOLEAN = "BOOLEAN" - BOOL = "BOOLEAN" - GEOGRAPHY = "GEOGRAPHY" # NOTE: not available in legacy types - RECORD = "RECORD" - STRUCT = "RECORD" - TIMESTAMP = "TIMESTAMP" - DATE = "DATE" - TIME = "TIME" - DATETIME = "DATETIME" diff --git a/bigquery/google/cloud/bigquery/external_config.py b/bigquery/google/cloud/bigquery/external_config.py deleted file mode 100644 index d702d9d83302..000000000000 --- a/bigquery/google/cloud/bigquery/external_config.py +++ /dev/null @@ -1,790 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define classes that describe external data sources. - - These are used for both Table.externalDataConfiguration and - Job.configuration.query.tableDefinitions. -""" - -from __future__ import absolute_import - -import base64 -import copy - -from google.cloud.bigquery._helpers import _to_bytes -from google.cloud.bigquery._helpers import _bytes_to_json -from google.cloud.bigquery._helpers import _int_or_none -from google.cloud.bigquery._helpers import _str_or_none -from google.cloud.bigquery.schema import SchemaField - - -class ExternalSourceFormat(object): - """The format for external data files. - - Note that the set of allowed values for external data sources is different - than the set used for loading data (see - :class:`~google.cloud.bigquery.job.SourceFormat`). 
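The aliasing in SqlTypeNames means either spelling of a type resolves to the legacy name stored in schemas, while StandardSqlDataTypes keeps only the scalar kinds from the gapic enum; for example:

from google.cloud.bigquery.enums import SqlTypeNames, StandardSqlDataTypes

SqlTypeNames.INT64.value    # 'INTEGER' (INT64 is an alias of INTEGER)
SqlTypeNames.STRUCT.value   # 'RECORD'
SqlTypeNames.BOOL.value     # 'BOOLEAN'

"ARRAY" in StandardSqlDataTypes.__members__      # False (non-scalar, filtered out)
"GEOGRAPHY" in StandardSqlDataTypes.__members__  # True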
- """ - - CSV = "CSV" - """Specifies CSV format.""" - - GOOGLE_SHEETS = "GOOGLE_SHEETS" - """Specifies Google Sheets format.""" - - NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" - """Specifies newline delimited JSON format.""" - - AVRO = "AVRO" - """Specifies Avro format.""" - - DATASTORE_BACKUP = "DATASTORE_BACKUP" - """Specifies datastore backup format""" - - BIGTABLE = "BIGTABLE" - """Specifies Bigtable format.""" - - -class BigtableColumn(object): - """Options for a Bigtable column.""" - - def __init__(self): - self._properties = {} - - @property - def encoding(self): - """str: The encoding of the values when the type is not `STRING` - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.encoding - """ - return self._properties.get("encoding") - - @encoding.setter - def encoding(self, value): - self._properties["encoding"] = value - - @property - def field_name(self): - """str: An identifier to use if the qualifier is not a valid BigQuery - field identifier - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.field_name - """ - return self._properties.get("fieldName") - - @field_name.setter - def field_name(self, value): - self._properties["fieldName"] = value - - @property - def only_read_latest(self): - """bool: If this is set, only the latest version of value in this - column are exposed. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.only_read_latest - """ - return self._properties.get("onlyReadLatest") - - @only_read_latest.setter - def only_read_latest(self, value): - self._properties["onlyReadLatest"] = value - - @property - def qualifier_encoded(self): - """Union[str, bytes]: The qualifier encoded in binary. - - The type is ``str`` (Python 2.x) or ``bytes`` (Python 3.x). The module - will handle base64 encoding for you. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.qualifier_encoded - """ - prop = self._properties.get("qualifierEncoded") - if prop is None: - return None - return base64.standard_b64decode(_to_bytes(prop)) - - @qualifier_encoded.setter - def qualifier_encoded(self, value): - self._properties["qualifierEncoded"] = _bytes_to_json(value) - - @property - def qualifier_string(self): - """str: A valid UTF-8 string qualifier - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.qualifier_string - """ - return self._properties.get("qualifierString") - - @qualifier_string.setter - def qualifier_string(self, value): - self._properties["qualifierString"] = value - - @property - def type_(self): - """str: The type to convert the value in cells of this column. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumn.FIELDS.type - """ - return self._properties.get("type") - - @type_.setter - def type_(self, value): - self._properties["type"] = value - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.BigtableColumn` - instance given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.BigtableColumn` - instance in the same representation as is returned from the - API. 
- - Returns: - external_config.BigtableColumn: Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class BigtableColumnFamily(object): - """Options for a Bigtable column family.""" - - def __init__(self): - self._properties = {} - - @property - def encoding(self): - """str: The encoding of the values when the type is not `STRING` - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumnFamily.FIELDS.encoding - """ - return self._properties.get("encoding") - - @encoding.setter - def encoding(self, value): - self._properties["encoding"] = value - - @property - def family_id(self): - """str: Identifier of the column family. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumnFamily.FIELDS.family_id - """ - return self._properties.get("familyId") - - @family_id.setter - def family_id(self, value): - self._properties["familyId"] = value - - @property - def only_read_latest(self): - """bool: If this is set only the latest version of value are exposed - for all columns in this column family. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumnFamily.FIELDS.only_read_latest - """ - return self._properties.get("onlyReadLatest") - - @only_read_latest.setter - def only_read_latest(self, value): - self._properties["onlyReadLatest"] = value - - @property - def type_(self): - """str: The type to convert the value in cells of this column family. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumnFamily.FIELDS.type - """ - return self._properties.get("type") - - @type_.setter - def type_(self, value): - self._properties["type"] = value - - @property - def columns(self): - """List[BigtableColumn]: Lists of columns - that should be exposed as individual fields. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableColumnFamily.FIELDS.columns - """ - prop = self._properties.get("columns", []) - return [BigtableColumn.from_api_repr(col) for col in prop] - - @columns.setter - def columns(self, value): - self._properties["columns"] = [col.to_api_repr() for col in value] - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.BigtableColumnFamily` - instance given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.BigtableColumnFamily` - instance in the same representation as is returned from the - API. - - Returns: - :class:`~.external_config.BigtableColumnFamily`: - Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class BigtableOptions(object): - """Options that describe how to treat Bigtable tables as BigQuery tables. - """ - - _SOURCE_FORMAT = "BIGTABLE" - _RESOURCE_NAME = "bigtableOptions" - - def __init__(self): - self._properties = {} - - @property - def ignore_unspecified_column_families(self): - """bool: If :data:`True`, ignore columns not specified in - :attr:`column_families` list. Defaults to :data:`False`. 
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableOptions.FIELDS.ignore_unspecified_column_families - """ - return self._properties.get("ignoreUnspecifiedColumnFamilies") - - @ignore_unspecified_column_families.setter - def ignore_unspecified_column_families(self, value): - self._properties["ignoreUnspecifiedColumnFamilies"] = value - - @property - def read_rowkey_as_string(self): - """bool: If :data:`True`, rowkey column families will be read and - converted to string. Defaults to :data:`False`. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableOptions.FIELDS.read_rowkey_as_string - """ - return self._properties.get("readRowkeyAsString") - - @read_rowkey_as_string.setter - def read_rowkey_as_string(self, value): - self._properties["readRowkeyAsString"] = value - - @property - def column_families(self): - """List[:class:`~.external_config.BigtableColumnFamily`]: List of - column families to expose in the table schema along with their types. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#BigtableOptions.FIELDS.column_families - """ - prop = self._properties.get("columnFamilies", []) - return [BigtableColumnFamily.from_api_repr(cf) for cf in prop] - - @column_families.setter - def column_families(self, value): - self._properties["columnFamilies"] = [cf.to_api_repr() for cf in value] - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.BigtableOptions` - instance given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.BigtableOptions` - instance in the same representation as is returned from the - API. - - Returns: - BigtableOptions: Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class CSVOptions(object): - """Options that describe how to treat CSV files as BigQuery tables.""" - - _SOURCE_FORMAT = "CSV" - _RESOURCE_NAME = "csvOptions" - - def __init__(self): - self._properties = {} - - @property - def allow_jagged_rows(self): - """bool: If :data:`True`, BigQuery treats missing trailing columns as - null values. Defaults to :data:`False`. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.allow_jagged_rows - """ - return self._properties.get("allowJaggedRows") - - @allow_jagged_rows.setter - def allow_jagged_rows(self, value): - self._properties["allowJaggedRows"] = value - - @property - def allow_quoted_newlines(self): - """bool: If :data:`True`, quoted data sections that contain newline - characters in a CSV file are allowed. Defaults to :data:`False`. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.allow_quoted_newlines - """ - return self._properties.get("allowQuotedNewlines") - - @allow_quoted_newlines.setter - def allow_quoted_newlines(self, value): - self._properties["allowQuotedNewlines"] = value - - @property - def encoding(self): - """str: The character encoding of the data. 
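Putting the three Bigtable classes together, a table definition is assembled bottom-up from columns to families to options; an illustrative sketch (the family and qualifier names are hypothetical):

from google.cloud.bigquery.external_config import (
    BigtableColumn,
    BigtableColumnFamily,
    BigtableOptions,
)

column = BigtableColumn()
column.qualifier_string = "temperature"
column.type_ = "FLOAT"

family = BigtableColumnFamily()
family.family_id = "measurements"
family.columns = [column]

options = BigtableOptions()
options.read_rowkey_as_string = True
options.column_families = [family]
options.to_api_repr()
# {'readRowkeyAsString': True,
#  'columnFamilies': [{'familyId': 'measurements',
#                      'columns': [{'qualifierString': 'temperature',
#                                   'type': 'FLOAT'}]}]}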
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.encoding - """ - return self._properties.get("encoding") - - @encoding.setter - def encoding(self, value): - self._properties["encoding"] = value - - @property - def field_delimiter(self): - """str: The separator for fields in a CSV file. Defaults to comma (','). - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.field_delimiter - """ - return self._properties.get("fieldDelimiter") - - @field_delimiter.setter - def field_delimiter(self, value): - self._properties["fieldDelimiter"] = value - - @property - def quote_character(self): - """str: The value that is used to quote data sections in a CSV file. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.quote - """ - return self._properties.get("quote") - - @quote_character.setter - def quote_character(self, value): - self._properties["quote"] = value - - @property - def skip_leading_rows(self): - """int: The number of rows at the top of a CSV file. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#CsvOptions.FIELDS.skip_leading_rows - """ - return _int_or_none(self._properties.get("skipLeadingRows")) - - @skip_leading_rows.setter - def skip_leading_rows(self, value): - self._properties["skipLeadingRows"] = str(value) - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.CSVOptions` instance - given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.CSVOptions` - instance in the same representation as is returned from the - API. - - Returns: - CSVOptions: Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class GoogleSheetsOptions(object): - """Options that describe how to treat Google Sheets as BigQuery tables.""" - - _SOURCE_FORMAT = "GOOGLE_SHEETS" - _RESOURCE_NAME = "googleSheetsOptions" - - def __init__(self): - self._properties = {} - - @property - def skip_leading_rows(self): - """int: The number of rows at the top of a sheet that BigQuery will - skip when reading the data. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#GoogleSheetsOptions.FIELDS.skip_leading_rows - """ - return _int_or_none(self._properties.get("skipLeadingRows")) - - @skip_leading_rows.setter - def skip_leading_rows(self, value): - self._properties["skipLeadingRows"] = str(value) - - @property - def range(self): - """str: The range of a sheet that BigQuery will query from. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#GoogleSheetsOptions.FIELDS.range - """ - return _str_or_none(self._properties.get("range")) - - @range.setter - def range(self, value): - self._properties["range"] = value - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.GoogleSheetsOptions` - instance given its API representation. 
- - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.GoogleSheetsOptions` - instance in the same representation as is returned from the - API. - - Returns: - GoogleSheetsOptions: Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -_OPTION_CLASSES = (BigtableOptions, CSVOptions, GoogleSheetsOptions) - - -class HivePartitioningOptions(object): - """[Beta] Options that configure hive partitioning. - - .. note:: - **Experimental**. This feature is experimental and might change or - have limited support. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#HivePartitioningOptions - """ - - def __init__(self): - self._properties = {} - - @property - def mode(self): - """Optional[str]: When set, what mode of hive partitioning to use when reading data. - - Two modes are supported: "AUTO" and "STRINGS". - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#HivePartitioningOptions.FIELDS.mode - """ - return self._properties.get("mode") - - @mode.setter - def mode(self, value): - self._properties["mode"] = value - - @property - def source_uri_prefix(self): - """Optional[str]: When hive partition detection is requested, a common prefix for - all source URIs is required. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#HivePartitioningOptions.FIELDS.source_uri_prefix - """ - return self._properties.get("sourceUriPrefix") - - @source_uri_prefix.setter - def source_uri_prefix(self, value): - self._properties["sourceUriPrefix"] = value - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: A dictionary in the format used by the BigQuery API. - """ - return copy.deepcopy(self._properties) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a :class:`~.external_config.HivePartitioningOptions` - instance given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of a :class:`~.external_config.HivePartitioningOptions` - instance in the same representation as is returned from the - API. - - Returns: - HivePartitioningOptions: Configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class ExternalConfig(object): - """Description of an external data source. - - Args: - source_format (ExternalSourceFormat): - See :attr:`source_format`. - """ - - def __init__(self, source_format): - self._properties = {"sourceFormat": source_format} - self._options = None - for optcls in _OPTION_CLASSES: - if source_format == optcls._SOURCE_FORMAT: - self._options = optcls() - break - - @property - def source_format(self): - """:class:`~.external_config.ExternalSourceFormat`: - Format of external source. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.source_format - """ - return self._properties["sourceFormat"] - - @property - def options(self): - """Optional[Dict[str, Any]]: Source-specific options.""" - return self._options - - @property - def autodetect(self): - """bool: If :data:`True`, try to detect schema and format options - automatically. 
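A sketch of the hive partitioning options, assuming a hypothetical bucket laid out like gs://my-bucket/tables/my_table/dt=2020-02-07/...:

from google.cloud.bigquery.external_config import HivePartitioningOptions

hive = HivePartitioningOptions()
hive.mode = "AUTO"   # infer partition key types; "STRINGS" leaves them as strings
hive.source_uri_prefix = "gs://my-bucket/tables/my_table/"  # hypothetical
hive.to_api_repr()
# {'mode': 'AUTO', 'sourceUriPrefix': 'gs://my-bucket/tables/my_table/'}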
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.autodetect - """ - return self._properties.get("autodetect") - - @autodetect.setter - def autodetect(self, value): - self._properties["autodetect"] = value - - @property - def compression(self): - """str: The compression type of the data source. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.compression - """ - return self._properties.get("compression") - - @compression.setter - def compression(self, value): - self._properties["compression"] = value - - @property - def hive_partitioning(self): - """Optional[:class:`~.external_config.HivePartitioningOptions`]: [Beta] When set, \ - it configures hive partitioning support. - - .. note:: - **Experimental**. This feature is experimental and might change or - have limited support. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.hive_partitioning_options - """ - prop = self._properties.get("hivePartitioningOptions") - if prop is None: - return None - return HivePartitioningOptions.from_api_repr(prop) - - @hive_partitioning.setter - def hive_partitioning(self, value): - prop = value.to_api_repr() if value is not None else None - self._properties["hivePartitioningOptions"] = prop - - @property - def ignore_unknown_values(self): - """bool: If :data:`True`, extra values that are not represented in the - table schema are ignored. Defaults to :data:`False`. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.ignore_unknown_values - """ - return self._properties.get("ignoreUnknownValues") - - @ignore_unknown_values.setter - def ignore_unknown_values(self, value): - self._properties["ignoreUnknownValues"] = value - - @property - def max_bad_records(self): - """int: The maximum number of bad records that BigQuery can ignore when - reading data. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.max_bad_records - """ - return self._properties.get("maxBadRecords") - - @max_bad_records.setter - def max_bad_records(self, value): - self._properties["maxBadRecords"] = value - - @property - def source_uris(self): - """List[str]: URIs that point to your data in Google Cloud. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.source_uris - """ - return self._properties.get("sourceUris", []) - - @source_uris.setter - def source_uris(self, value): - self._properties["sourceUris"] = value - - @property - def schema(self): - """List[:class:`~google.cloud.bigquery.schema.SchemaField`]: The schema - for the data. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ExternalDataConfiguration.FIELDS.schema - """ - prop = self._properties.get("schema", {}) - return [SchemaField.from_api_repr(field) for field in prop.get("fields", [])] - - @schema.setter - def schema(self, value): - prop = value - if value is not None: - prop = {"fields": [field.to_api_repr() for field in value]} - self._properties["schema"] = prop - - def to_api_repr(self): - """Build an API representation of this object. - - Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. 
- """ - config = copy.deepcopy(self._properties) - if self.options is not None: - r = self.options.to_api_repr() - if r != {}: - config[self.options._RESOURCE_NAME] = r - return config - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct an :class:`~.external_config.ExternalConfig` - instance given its API representation. - - Args: - resource (Dict[str, Any]): - Definition of an :class:`~.external_config.ExternalConfig` - instance in the same representation as is returned from the - API. - - Returns: - ExternalConfig: Configuration parsed from ``resource``. - """ - config = cls(resource["sourceFormat"]) - for optcls in _OPTION_CLASSES: - opts = resource.get(optcls._RESOURCE_NAME) - if opts is not None: - config._options = optcls.from_api_repr(opts) - break - config._properties = copy.deepcopy(resource) - return config diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py deleted file mode 100644 index 5861febe830d..000000000000 --- a/bigquery/google/cloud/bigquery/job.py +++ /dev/null @@ -1,3808 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Jobs.""" - -from __future__ import division - -import concurrent.futures -import copy -import re -import threading - -import requests -import six -from six.moves import http_client - -import google.api_core.future.polling -from google.auth.transport.requests import TimeoutGuard -from google.cloud import exceptions -from google.cloud.exceptions import NotFound -from google.cloud.bigquery.dataset import Dataset -from google.cloud.bigquery.dataset import DatasetListItem -from google.cloud.bigquery.dataset import DatasetReference -from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration -from google.cloud.bigquery.external_config import ExternalConfig -from google.cloud.bigquery.external_config import HivePartitioningOptions -from google.cloud.bigquery import _helpers -from google.cloud.bigquery.query import _query_param_from_api_repr -from google.cloud.bigquery.query import ArrayQueryParameter -from google.cloud.bigquery.query import ScalarQueryParameter -from google.cloud.bigquery.query import StructQueryParameter -from google.cloud.bigquery.query import UDFResource -from google.cloud.bigquery.retry import DEFAULT_RETRY -from google.cloud.bigquery.routine import RoutineReference -from google.cloud.bigquery.schema import SchemaField -from google.cloud.bigquery.schema import _to_schema_fields -from google.cloud.bigquery.table import _EmptyRowIterator -from google.cloud.bigquery.table import RangePartitioning -from google.cloud.bigquery.table import _table_arg_to_table_ref -from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.table import Table -from google.cloud.bigquery.table import TimePartitioning - -_DONE_STATE = "DONE" -_STOPPED_REASON = "stopped" -_TIMEOUT_BUFFER_SECS = 0.1 -_SERVER_TIMEOUT_MARGIN_SECS = 1.0 -_CONTAINS_ORDER_BY = re.compile(r"ORDER\s+BY", re.IGNORECASE) - 
-_ERROR_REASON_TO_EXCEPTION = { - "accessDenied": http_client.FORBIDDEN, - "backendError": http_client.INTERNAL_SERVER_ERROR, - "billingNotEnabled": http_client.FORBIDDEN, - "billingTierLimitExceeded": http_client.BAD_REQUEST, - "blocked": http_client.FORBIDDEN, - "duplicate": http_client.CONFLICT, - "internalError": http_client.INTERNAL_SERVER_ERROR, - "invalid": http_client.BAD_REQUEST, - "invalidQuery": http_client.BAD_REQUEST, - "notFound": http_client.NOT_FOUND, - "notImplemented": http_client.NOT_IMPLEMENTED, - "quotaExceeded": http_client.FORBIDDEN, - "rateLimitExceeded": http_client.FORBIDDEN, - "resourceInUse": http_client.BAD_REQUEST, - "resourcesExceeded": http_client.BAD_REQUEST, - "responseTooLarge": http_client.FORBIDDEN, - "stopped": http_client.OK, - "tableUnavailable": http_client.BAD_REQUEST, -} - - -def _error_result_to_exception(error_result): - """Maps BigQuery error reasons to an exception. - - The reasons and their matching HTTP status codes are documented on - the `troubleshooting errors`_ page. - - .. _troubleshooting errors: https://cloud.google.com/bigquery\ - /troubleshooting-errors - - Args: - error_result (Mapping[str, str]): The error result from BigQuery. - - Returns: - google.cloud.exceptions.GoogleCloudError: The mapped exception. - """ - reason = error_result.get("reason") - status_code = _ERROR_REASON_TO_EXCEPTION.get( - reason, http_client.INTERNAL_SERVER_ERROR - ) - return exceptions.from_http_status( - status_code, error_result.get("message", ""), errors=[error_result] - ) - - -def _contains_order_by(query): - """Do we need to preserve the order of the query results? - - This function has known false positives, such as with ordered window - functions: - - .. code-block:: sql - - SELECT SUM(x) OVER ( - window_name - PARTITION BY... - ORDER BY... - window_frame_clause) - FROM ... - - This false positive failure case means the behavior will be correct, but - downloading results with the BigQuery Storage API may be slower than it - otherwise would. This is preferable to the false negative case, where - results are expected to be in order but are not (due to parallel reads). - """ - return query and _CONTAINS_ORDER_BY.search(query) - - -class Compression(object): - """The compression type to use for exported files. The default value is - :attr:`NONE`. - - :attr:`DEFLATE` and :attr:`SNAPPY` are - only supported for Avro. - """ - - GZIP = "GZIP" - """Specifies GZIP format.""" - - DEFLATE = "DEFLATE" - """Specifies DEFLATE format.""" - - SNAPPY = "SNAPPY" - """Specifies SNAPPY format.""" - - NONE = "NONE" - """Specifies no compression.""" - - -class CreateDisposition(object): - """Specifies whether the job is allowed to create new tables. The default - value is :attr:`CREATE_IF_NEEDED`. - - Creation, truncation and append actions occur as one atomic update - upon job completion. - """ - - CREATE_IF_NEEDED = "CREATE_IF_NEEDED" - """If the table does not exist, BigQuery creates the table.""" - - CREATE_NEVER = "CREATE_NEVER" - """The table must already exist. If it does not, a 'notFound' error is - returned in the job result.""" - - -class DestinationFormat(object): - """The exported file format. The default value is :attr:`CSV`. - - Tables with nested or repeated fields cannot be exported as CSV. 
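The mapping above drives _error_result_to_exception(); two illustrative cases:

exc = _error_result_to_exception(
    {"reason": "notFound", "message": "Not found: Table my_table"}
)
type(exc).__name__   # 'NotFound' (mapped via http_client.NOT_FOUND, i.e. 404)

# Unknown reasons fall back to an internal server error:
type(_error_result_to_exception({"reason": "mystery"})).__name__
# 'InternalServerError'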
- """ - - CSV = "CSV" - """Specifies CSV format.""" - - NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" - """Specifies newline delimited JSON format.""" - - AVRO = "AVRO" - """Specifies Avro format.""" - - -class Encoding(object): - """The character encoding of the data. The default is :attr:`UTF_8`. - - BigQuery decodes the data after the raw, binary data has been - split using the values of the quote and fieldDelimiter properties. - """ - - UTF_8 = "UTF-8" - """Specifies UTF-8 encoding.""" - - ISO_8859_1 = "ISO-8859-1" - """Specifies ISO-8859-1 encoding.""" - - -class QueryPriority(object): - """Specifies a priority for the query. The default value is - :attr:`INTERACTIVE`. - """ - - INTERACTIVE = "INTERACTIVE" - """Specifies interactive priority.""" - - BATCH = "BATCH" - """Specifies batch priority.""" - - -class SourceFormat(object): - """The format of the data files. The default value is :attr:`CSV`. - - Note that the set of allowed values for loading data is different - than the set used for external data sources (see - :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`). - """ - - CSV = "CSV" - """Specifies CSV format.""" - - DATASTORE_BACKUP = "DATASTORE_BACKUP" - """Specifies datastore backup format""" - - NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" - """Specifies newline delimited JSON format.""" - - AVRO = "AVRO" - """Specifies Avro format.""" - - PARQUET = "PARQUET" - """Specifies Parquet format.""" - - ORC = "ORC" - """Specifies Orc format.""" - - -class WriteDisposition(object): - """Specifies the action that occurs if destination table already exists. - - The default value is :attr:`WRITE_APPEND`. - - Each action is atomic and only occurs if BigQuery is able to complete - the job successfully. Creation, truncation and append actions occur as one - atomic update upon job completion. - """ - - WRITE_APPEND = "WRITE_APPEND" - """If the table already exists, BigQuery appends the data to the table.""" - - WRITE_TRUNCATE = "WRITE_TRUNCATE" - """If the table already exists, BigQuery overwrites the table data.""" - - WRITE_EMPTY = "WRITE_EMPTY" - """If the table already exists and contains data, a 'duplicate' error is - returned in the job result.""" - - -class SchemaUpdateOption(object): - """Specifies an update to the destination table schema as a side effect of - a load job. - """ - - ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION" - """Allow adding a nullable field to the schema.""" - - ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION" - """Allow relaxing a required field in the original schema to nullable.""" - - -class _JobReference(object): - """A reference to a job. - - Arguments: - job_id (str): ID of the job to run. - project (str): ID of the project where the job runs. - location (str): Location of where the job runs. - """ - - def __init__(self, job_id, project, location): - self._properties = {"jobId": job_id, "projectId": project} - # The location field must not be populated if it is None. 
- if location: - self._properties["location"] = location - - @property - def job_id(self): - """str: ID of the job.""" - return self._properties.get("jobId") - - @property - def project(self): - """str: ID of the project where the job runs.""" - return self._properties.get("projectId") - - @property - def location(self): - """str: Location where the job runs.""" - return self._properties.get("location") - - def _to_api_repr(self): - """Returns the API resource representation of the job reference.""" - return copy.deepcopy(self._properties) - - @classmethod - def _from_api_repr(cls, resource): - """Returns a job reference for an API resource representation.""" - job_id = resource.get("jobId") - project = resource.get("projectId") - location = resource.get("location") - job_ref = cls(job_id, project, location) - return job_ref - - -class _AsyncJob(google.api_core.future.polling.PollingFuture): - """Base class for asynchronous jobs. - - Arguments: - job_id (Union[str, _JobReference]): - Job's ID in the project associated with the client or a - fully-qualified job reference. - client (google.cloud.bigquery.client.Client): - Client which holds credentials and project configuration. - """ - - def __init__(self, job_id, client): - super(_AsyncJob, self).__init__() - - # The job reference can be either a plain job ID or the full resource. - # Populate the properties dictionary consistently depending on what has - # been passed in. - job_ref = job_id - if not isinstance(job_id, _JobReference): - job_ref = _JobReference(job_id, client.project, None) - self._properties = {"jobReference": job_ref._to_api_repr()} - - self._client = client - self._result_set = False - self._completion_lock = threading.Lock() - - @property - def job_id(self): - """str: ID of the job.""" - return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"]) - - @property - def parent_job_id(self): - """Return the ID of the parent job. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.parent_job_id - - Returns: - Optional[str]: parent job id. - """ - return _helpers._get_sub_prop(self._properties, ["statistics", "parentJobId"]) - - @property - def script_statistics(self): - resource = _helpers._get_sub_prop( - self._properties, ["statistics", "scriptStatistics"] - ) - if resource is None: - return None - return ScriptStatistics(resource) - - @property - def num_child_jobs(self): - """The number of child jobs executed. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.num_child_jobs - - Returns: - int - """ - count = _helpers._get_sub_prop(self._properties, ["statistics", "numChildJobs"]) - return int(count) if count is not None else 0 - - @property - def project(self): - """Project bound to the job. - - Returns: - str: the project (derived from the client). - """ - return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"]) - - @property - def location(self): - """str: Location where the job runs.""" - return _helpers._get_sub_prop(self._properties, ["jobReference", "location"]) - - def _require_client(self, client): - """Check client or verify over-ride. - - Args: - client (Optional[google.cloud.bigquery.client.Client]): - the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. - - Returns: - google.cloud.bigquery.client.Client: - The client passed in or the currently bound client. 
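_JobReference mirrors the jobReference resource directly, and the constructor drops a falsy location, as a short sketch shows:

ref = _JobReference("job_123", project="my-project", location="US")
ref._to_api_repr()
# {'jobId': 'job_123', 'projectId': 'my-project', 'location': 'US'}

# location is left out of the resource entirely when it is None:
_JobReference("job_456", project="my-project", location=None)._to_api_repr()
# {'jobId': 'job_456', 'projectId': 'my-project'}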
- """ - if client is None: - client = self._client - return client - - @property - def job_type(self): - """Type of job. - - Returns: - str: one of 'load', 'copy', 'extract', 'query'. - """ - return self._JOB_TYPE - - @property - def path(self): - """URL path for the job's APIs. - - Returns: - str: the path based on project and job ID. - """ - return "/projects/%s/jobs/%s" % (self.project, self.job_id) - - @property - def labels(self): - """Dict[str, str]: Labels for the job.""" - return self._properties.setdefault("labels", {}) - - @property - def etag(self): - """ETag for the job resource. - - Returns: - Optional[str]: the ETag (None until set from the server). - """ - return self._properties.get("etag") - - @property - def self_link(self): - """URL for the job resource. - - Returns: - Optional[str]: the URL (None until set from the server). - """ - return self._properties.get("selfLink") - - @property - def user_email(self): - """E-mail address of user who submitted the job. - - Returns: - Optional[str]: the URL (None until set from the server). - """ - return self._properties.get("user_email") - - @property - def created(self): - """Datetime at which the job was created. - - Returns: - Optional[datetime.datetime]: - the creation time (None until set from the server). - """ - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("creationTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) - - @property - def started(self): - """Datetime at which the job was started. - - Returns: - Optional[datetime.datetime]: - the start time (None until set from the server). - """ - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("startTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) - - @property - def ended(self): - """Datetime at which the job finished. - - Returns: - Optional[datetime.datetime]: - the end time (None until set from the server). - """ - statistics = self._properties.get("statistics") - if statistics is not None: - millis = statistics.get("endTime") - if millis is not None: - return _helpers._datetime_from_microseconds(millis * 1000.0) - - def _job_statistics(self): - """Helper for job-type specific statistics-based properties.""" - statistics = self._properties.get("statistics", {}) - return statistics.get(self._JOB_TYPE, {}) - - @property - def error_result(self): - """Error information about the job as a whole. - - Returns: - Optional[Mapping]: the error information (None until set from the server). - """ - status = self._properties.get("status") - if status is not None: - return status.get("errorResult") - - @property - def errors(self): - """Information about individual errors generated by the job. - - Returns: - Optional[List[Mapping]]: - the error information (None until set from the server). - """ - status = self._properties.get("status") - if status is not None: - return status.get("errors") - - @property - def state(self): - """Status of the job. - - Returns: - Optional[str]: - the state (None until set from the server). 
- """ - status = self._properties.get("status") - if status is not None: - return status.get("state") - - def _scrub_local_properties(self, cleaned): - """Helper: handle subclass properties in cleaned.""" - pass - - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - raise NotImplementedError("Abstract") - - def _set_properties(self, api_response): - """Update properties from resource in body of ``api_response`` - - Args: - api_response (Dict): response returned from an API call. - """ - cleaned = api_response.copy() - self._scrub_local_properties(cleaned) - - statistics = cleaned.get("statistics", {}) - if "creationTime" in statistics: - statistics["creationTime"] = float(statistics["creationTime"]) - if "startTime" in statistics: - statistics["startTime"] = float(statistics["startTime"]) - if "endTime" in statistics: - statistics["endTime"] = float(statistics["endTime"]) - - self._properties.clear() - self._properties.update(cleaned) - self._copy_configuration_properties(cleaned.get("configuration", {})) - - # For Future interface - self._set_future_result() - - @classmethod - def _get_resource_config(cls, resource): - """Helper for :meth:`from_api_repr` - - Args: - resource (Dict): resource for the job. - - Returns: - (str, Dict): - tuple (string, dict), where the first element is the - job ID and the second contains job-specific configuration. - - Raises: - KeyError: - If the resource has no identifier, or - is missing the appropriate configuration. - """ - if "jobReference" not in resource or "jobId" not in resource["jobReference"]: - raise KeyError( - "Resource lacks required identity information: " - '["jobReference"]["jobId"]' - ) - job_id = resource["jobReference"]["jobId"] - if ( - "configuration" not in resource - or cls._JOB_TYPE not in resource["configuration"] - ): - raise KeyError( - "Resource lacks required configuration: " - '["configuration"]["%s"]' % cls._JOB_TYPE - ) - return job_id, resource["configuration"] - - def to_api_repr(self): - """Generate a resource for the job.""" - raise NotImplementedError("Abstract") - - _build_resource = to_api_repr # backward-compatibility alias - - def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None): - """API call: begin the job via a POST request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert - - Args: - client (Optional[google.cloud.bigquery.client.Client]): - The client to use. If not passed, falls back to the ``client`` - associated with the job object or``NoneType`` - retry (Optional[google.api_core.retry.Retry]): - How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Raises: - ValueError: - If the job has already begun. - """ - if self.state is not None: - raise ValueError("Job already begun.") - - client = self._require_client(client) - path = "/projects/%s/jobs" % (self.project,) - - # jobs.insert is idempotent because we ensure that every new - # job has an ID. - api_response = client._call_api( - retry, method="POST", path=path, data=self.to_api_repr(), timeout=timeout - ) - self._set_properties(api_response) - - def exists(self, client=None, retry=DEFAULT_RETRY, timeout=None): - """API call: test for the existence of the job via a GET request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - - Args: - client (Optional[google.cloud.bigquery.client.Client]): - the client to use. 
-                If not passed, falls back to the
-                ``client`` stored on the current job.
-
-            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-            timeout (Optional[float]):
-                The number of seconds to wait for the underlying HTTP transport
-                before using ``retry``.
-
-        Returns:
-            bool: Boolean indicating existence of the job.
-        """
-        client = self._require_client(client)
-
-        extra_params = {"fields": "id"}
-        if self.location:
-            extra_params["location"] = self.location
-
-        try:
-            client._call_api(
-                retry,
-                method="GET",
-                path=self.path,
-                query_params=extra_params,
-                timeout=timeout,
-            )
-        except NotFound:
-            return False
-        else:
-            return True
-
-    def reload(self, client=None, retry=DEFAULT_RETRY, timeout=None):
-        """API call: refresh job properties via a GET request.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get
-
-        Args:
-            client (Optional[google.cloud.bigquery.client.Client]):
-                the client to use. If not passed, falls back to the
-                ``client`` stored on the current job.
-
-            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-            timeout (Optional[float]):
-                The number of seconds to wait for the underlying HTTP transport
-                before using ``retry``.
-        """
-        client = self._require_client(client)
-
-        extra_params = {}
-        if self.location:
-            extra_params["location"] = self.location
-
-        api_response = client._call_api(
-            retry,
-            method="GET",
-            path=self.path,
-            query_params=extra_params,
-            timeout=timeout,
-        )
-        self._set_properties(api_response)
-
-    def cancel(self, client=None, retry=DEFAULT_RETRY, timeout=None):
-        """API call: cancel job via a POST request
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel
-
-        Args:
-            client (Optional[google.cloud.bigquery.client.Client]):
-                the client to use. If not passed, falls back to the
-                ``client`` stored on the current job.
-            retry (Optional[google.api_core.retry.Retry]): How to retry the RPC.
-            timeout (Optional[float]):
-                The number of seconds to wait for the underlying HTTP transport
-                before using ``retry``.
-
-        Returns:
-            bool: Boolean indicating that the cancel request was sent.
-        """
-        client = self._require_client(client)
-
-        extra_params = {}
-        if self.location:
-            extra_params["location"] = self.location
-
-        api_response = client._call_api(
-            retry,
-            method="POST",
-            path="{}/cancel".format(self.path),
-            query_params=extra_params,
-            timeout=timeout,
-        )
-        self._set_properties(api_response["job"])
-        # The Future interface requires that we return True if the *attempt*
-        # to cancel was successful.
-        return True
-
-    # The following methods implement the PollingFuture interface. Note that
-    # the methods above are from the pre-Future interface and are left for
-    # compatibility. The only "overloaded" method is :meth:`cancel`, which
-    # satisfies both interfaces.
-
-    def _set_future_result(self):
-        """Set the result or exception from the job if it is complete."""
-        # This must be done in a lock to prevent the polling thread
-        # and main thread from both executing the completion logic
-        # at the same time.
-        with self._completion_lock:
-            # If the operation isn't complete or if the result has already been
-            # set, do not call set_result/set_exception again.
-            # Note: self._result_set is set to True in set_result and
-            # set_exception, in case those methods are invoked directly.
-            if self.state != _DONE_STATE or self._result_set:
-                return
-
-            if self.error_result is not None:
-                exception = _error_result_to_exception(self.error_result)
-                self.set_exception(exception)
-            else:
-                self.set_result(self)
-
-    def done(self, retry=DEFAULT_RETRY, timeout=None):
-        """Refresh the job and check if it is complete.
-
-        Args:
-            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-            timeout (Optional[float]):
-                The number of seconds to wait for the underlying HTTP transport
-                before using ``retry``.
-
-        Returns:
-            bool: True if the job is complete, False otherwise.
-        """
-        # Do not refresh if the state is already done, as the job will not
-        # change once complete.
-        if self.state != _DONE_STATE:
-            self.reload(retry=retry, timeout=timeout)
-        return self.state == _DONE_STATE
-
-    def result(self, retry=DEFAULT_RETRY, timeout=None):
-        """Start the job and wait for it to complete and get the result.
-
-        Args:
-            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
-            timeout (Optional[float]):
-                The number of seconds to wait for the underlying HTTP transport
-                before using ``retry``.
-                If multiple requests are made under the hood, ``timeout`` is
-                interpreted as the approximate total time of **all** requests.
-
-        Returns:
-            _AsyncJob: This instance.
-
-        Raises:
-            google.cloud.exceptions.GoogleCloudError:
-                if the job failed.
-            concurrent.futures.TimeoutError:
-                if the job did not complete in the given timeout.
-        """
-        if self.state is None:
-            with TimeoutGuard(
-                timeout, timeout_error_type=concurrent.futures.TimeoutError
-            ) as guard:
-                self._begin(retry=retry, timeout=timeout)
-            timeout = guard.remaining_timeout
-        # TODO: modify PollingFuture so it can pass a retry argument to done().
-        return super(_AsyncJob, self).result(timeout=timeout)
-
-    def cancelled(self):
-        """Check if the job has been cancelled.
-
-        The API does not expose a direct cancellation flag, so this returns
-        True only when the job's error result reports the job as stopped.
-        This method is here to satisfy the interface for
-        :class:`google.api_core.future.Future`.
-
-        Returns:
-            bool: True if the job's error result indicates it was stopped.
-        """
-        return (
-            self.error_result is not None
-            and self.error_result.get("reason") == _STOPPED_REASON
-        )
-
-
-class _JobConfig(object):
-    """Abstract base class for job configuration objects.
-
-    Args:
-        job_type (str): The key to use for the job configuration.
-    """
-
-    def __init__(self, job_type, **kwargs):
-        self._job_type = job_type
-        self._properties = {job_type: {}}
-        for prop, val in kwargs.items():
-            setattr(self, prop, val)
-
-    @property
-    def labels(self):
-        """Dict[str, str]: Labels for the job.
-
-        This method always returns a dict. To change a job's labels,
-        modify the dict, then call ``Client.update_job``. To delete a
-        label, set its value to :data:`None` before updating.
-
-        Raises:
-            ValueError: If ``value`` type is invalid.
-        """
-        return self._properties.setdefault("labels", {})
-
-    @labels.setter
-    def labels(self, value):
-        if not isinstance(value, dict):
-            raise ValueError("Pass a dict")
-        self._properties["labels"] = value
-
-    def _get_sub_prop(self, key, default=None):
-        """Get a value in the ``self._properties[self._job_type]`` dictionary.
-
-        Most job properties are inside the dictionary related to the job type
-        (e.g. 'copy', 'extract', 'load', 'query').
-        Use this method to access
-        those properties::
-
-            self._get_sub_prop('destinationTable')
-
-        This is equivalent to using the ``_helpers._get_sub_prop`` function::
-
-            _helpers._get_sub_prop(
-                self._properties, ['query', 'destinationTable'])
-
-        Args:
-            key (str):
-                Key for the value to get in the
-                ``self._properties[self._job_type]`` dictionary.
-            default (object):
-                (Optional) Default value to return if the key is not found.
-                Defaults to :data:`None`.
-
-        Returns:
-            object: The value if present or the default.
-        """
-        return _helpers._get_sub_prop(
-            self._properties, [self._job_type, key], default=default
-        )
-
-    def _set_sub_prop(self, key, value):
-        """Set a value in the ``self._properties[self._job_type]`` dictionary.
-
-        Most job properties are inside the dictionary related to the job type
-        (e.g. 'copy', 'extract', 'load', 'query'). Use this method to set
-        those properties::
-
-            self._set_sub_prop('useLegacySql', False)
-
-        This is equivalent to using the ``_helpers._set_sub_prop`` function::
-
-            _helpers._set_sub_prop(
-                self._properties, ['query', 'useLegacySql'], False)
-
-        Args:
-            key (str):
-                Key to set in the ``self._properties[self._job_type]``
-                dictionary.
-            value (object): Value to set.
-        """
-        _helpers._set_sub_prop(self._properties, [self._job_type, key], value)
-
-    def _del_sub_prop(self, key):
-        """Remove ``key`` from the ``self._properties[self._job_type]`` dict.
-
-        Most job properties are inside the dictionary related to the job type
-        (e.g. 'copy', 'extract', 'load', 'query'). Use this method to clear
-        those properties::
-
-            self._del_sub_prop('useLegacySql')
-
-        This is equivalent to using the ``_helpers._del_sub_prop`` function::
-
-            _helpers._del_sub_prop(
-                self._properties, ['query', 'useLegacySql'])
-
-        Args:
-            key (str):
-                Key to remove in the ``self._properties[self._job_type]``
-                dictionary.
-        """
-        _helpers._del_sub_prop(self._properties, [self._job_type, key])
-
-    def to_api_repr(self):
-        """Build an API representation of the job config.
-
-        Returns:
-            Dict: A dictionary in the format used by the BigQuery API.
-        """
-        return copy.deepcopy(self._properties)
-
-    def _fill_from_default(self, default_job_config):
-        """Merge this job config with a default job config.
-
-        The keys in this object take precedence over the keys in the default
-        config. The merge is done at the top-level as well as for keys one
-        level below the job type.
-
-        Args:
-            default_job_config (google.cloud.bigquery.job._JobConfig):
-                The default job config that will be used to fill in self.
-
-        Returns:
-            google.cloud.bigquery.job._JobConfig: A new (merged) job config.
-        """
-        if self._job_type != default_job_config._job_type:
-            raise TypeError(
-                "attempted to merge two incompatible job types: "
-                + repr(self._job_type)
-                + ", "
-                + repr(default_job_config._job_type)
-            )
-
-        new_job_config = self.__class__()
-
-        default_job_properties = copy.deepcopy(default_job_config._properties)
-        for key in self._properties:
-            if key != self._job_type:
-                default_job_properties[key] = self._properties[key]
-
-        default_job_properties[self._job_type].update(self._properties[self._job_type])
-        new_job_config._properties = default_job_properties
-
-        return new_job_config
-
-    @classmethod
-    def from_api_repr(cls, resource):
-        """Factory: construct a job configuration given its API representation
-
-        Args:
-            resource (Dict):
-                A job configuration in the same representation as is
-                returned from the API.
-
-        Returns:
-            google.cloud.bigquery.job._JobConfig: Configuration parsed from ``resource``.
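# A minimal sketch of the ``_fill_from_default`` merge semantics described
# above, using the public QueryJobConfig defined later in this module; the
# numeric values are arbitrary assumptions.
from google.cloud import bigquery

default = bigquery.QueryJobConfig(use_legacy_sql=False, maximum_bytes_billed=10 ** 9)
specific = bigquery.QueryJobConfig(maximum_bytes_billed=5 * 10 ** 9)
merged = specific._fill_from_default(default)
assert merged.use_legacy_sql is False              # filled in from the default
assert merged.maximum_bytes_billed == 5 * 10 ** 9  # the specific config wins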
- """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - -class LoadJobConfig(_JobConfig): - """Configuration options for load jobs. - - All properties in this class are optional. Values which are :data:`None` -> - server defaults. Set properties on the constructed configuration by using - the property name as the name of a keyword argument. - """ - - def __init__(self, **kwargs): - super(LoadJobConfig, self).__init__("load", **kwargs) - - @property - def allow_jagged_rows(self): - """bool: Allow missing trailing optional columns (CSV only). - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_jagged_rows - """ - return self._get_sub_prop("allowJaggedRows") - - @allow_jagged_rows.setter - def allow_jagged_rows(self, value): - self._set_sub_prop("allowJaggedRows", value) - - @property - def allow_quoted_newlines(self): - """bool: Allow quoted data containing newline characters (CSV only). - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_quoted_newlines - """ - return self._get_sub_prop("allowQuotedNewlines") - - @allow_quoted_newlines.setter - def allow_quoted_newlines(self, value): - self._set_sub_prop("allowQuotedNewlines", value) - - @property - def autodetect(self): - """bool: Automatically infer the schema from a sample of the data. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.autodetect - """ - return self._get_sub_prop("autodetect") - - @autodetect.setter - def autodetect(self, value): - self._set_sub_prop("autodetect", value) - - @property - def clustering_fields(self): - """Union[List[str], None]: Fields defining clustering for the table - - (Defaults to :data:`None`). - - Clustering fields are immutable after table creation. - - .. note:: - - As of 2018-06-29, clustering fields cannot be set on a table - which does not also have time partioning defined. - """ - prop = self._get_sub_prop("clustering") - if prop is not None: - return list(prop.get("fields", ())) - - @clustering_fields.setter - def clustering_fields(self, value): - """Union[List[str], None]: Fields defining clustering for the table - - (Defaults to :data:`None`). - """ - if value is not None: - self._set_sub_prop("clustering", {"fields": value}) - else: - self._del_sub_prop("clustering") - - @property - def create_disposition(self): - """google.cloud.bigquery.job.CreateDisposition: Specifies behavior - for creating tables. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition - """ - return self._get_sub_prop("createDisposition") - - @create_disposition.setter - def create_disposition(self, value): - self._set_sub_prop("createDisposition", value) - - @property - def destination_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the destination table. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. 
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_encryption_configuration
-        """
-        prop = self._get_sub_prop("destinationEncryptionConfiguration")
-        if prop is not None:
-            prop = EncryptionConfiguration.from_api_repr(prop)
-        return prop
-
-    @destination_encryption_configuration.setter
-    def destination_encryption_configuration(self, value):
-        api_repr = value
-        if value is not None:
-            api_repr = value.to_api_repr()
-            self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
-        else:
-            self._del_sub_prop("destinationEncryptionConfiguration")
-
-    @property
-    def destination_table_description(self):
-        """Union[str, None]: Description given to the destination table.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description
-        """
-        prop = self._get_sub_prop("destinationTableProperties")
-        if prop is not None:
-            return prop["description"]
-
-    @destination_table_description.setter
-    def destination_table_description(self, value):
-        keys = [self._job_type, "destinationTableProperties", "description"]
-        if value is not None:
-            _helpers._set_sub_prop(self._properties, keys, value)
-        else:
-            _helpers._del_sub_prop(self._properties, keys)
-
-    @property
-    def destination_table_friendly_name(self):
-        """Union[str, None]: Name given to the destination table.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name
-        """
-        prop = self._get_sub_prop("destinationTableProperties")
-        if prop is not None:
-            return prop["friendlyName"]
-
-    @destination_table_friendly_name.setter
-    def destination_table_friendly_name(self, value):
-        keys = [self._job_type, "destinationTableProperties", "friendlyName"]
-        if value is not None:
-            _helpers._set_sub_prop(self._properties, keys, value)
-        else:
-            _helpers._del_sub_prop(self._properties, keys)
-
-    @property
-    def encoding(self):
-        """google.cloud.bigquery.job.Encoding: The character encoding of the
-        data.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.encoding
-        """
-        return self._get_sub_prop("encoding")
-
-    @encoding.setter
-    def encoding(self, value):
-        self._set_sub_prop("encoding", value)
-
-    @property
-    def field_delimiter(self):
-        """str: The separator for fields in a CSV file.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.field_delimiter
-        """
-        return self._get_sub_prop("fieldDelimiter")
-
-    @field_delimiter.setter
-    def field_delimiter(self, value):
-        self._set_sub_prop("fieldDelimiter", value)
-
-    @property
-    def hive_partitioning(self):
-        """Optional[:class:`~.external_config.HivePartitioningOptions`]: [Beta] When set, \
-        it configures hive partitioning support.
-
-        .. note::
-            **Experimental**. This feature is experimental and might change or
-            have limited support.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.hive_partitioning_options
-        """
-        prop = self._get_sub_prop("hivePartitioningOptions")
-        if prop is None:
-            return None
-        return HivePartitioningOptions.from_api_repr(prop)
-
-    @hive_partitioning.setter
-    def hive_partitioning(self, value):
-        if value is not None:
-            if isinstance(value, HivePartitioningOptions):
-                value = value.to_api_repr()
-            else:
-                raise TypeError("Expected a HivePartitioningOptions instance or None.")
-
-        self._set_sub_prop("hivePartitioningOptions", value)
-
-    @property
-    def ignore_unknown_values(self):
-        """bool: Ignore extra values not represented in the table schema.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.ignore_unknown_values
-        """
-        return self._get_sub_prop("ignoreUnknownValues")
-
-    @ignore_unknown_values.setter
-    def ignore_unknown_values(self, value):
-        self._set_sub_prop("ignoreUnknownValues", value)
-
-    @property
-    def max_bad_records(self):
-        """int: Number of invalid rows to ignore.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.max_bad_records
-        """
-        return _helpers._int_or_none(self._get_sub_prop("maxBadRecords"))
-
-    @max_bad_records.setter
-    def max_bad_records(self, value):
-        self._set_sub_prop("maxBadRecords", value)
-
-    @property
-    def null_marker(self):
-        """str: Represents a null value (CSV only).
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.null_marker
-        """
-        return self._get_sub_prop("nullMarker")
-
-    @null_marker.setter
-    def null_marker(self, value):
-        self._set_sub_prop("nullMarker", value)
-
-    @property
-    def quote_character(self):
-        """str: Character used to quote data sections (CSV only).
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.quote
-        """
-        return self._get_sub_prop("quote")
-
-    @quote_character.setter
-    def quote_character(self, value):
-        self._set_sub_prop("quote", value)
-
-    @property
-    def range_partitioning(self):
-        """Optional[google.cloud.bigquery.table.RangePartitioning]:
-        Configures range-based partitioning for destination table.
-
-        .. note::
-            **Beta**. The integer range partitioning feature is in a
-            pre-release state and might change or have limited support.
-
-        Only specify at most one of
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`.
-
-        Raises:
-            ValueError:
-                If the value is not
-                :class:`~google.cloud.bigquery.table.RangePartitioning` or
-                :data:`None`.
-        """
-        resource = self._get_sub_prop("rangePartitioning")
-        if resource is not None:
-            return RangePartitioning(_properties=resource)
-
-    @range_partitioning.setter
-    def range_partitioning(self, value):
-        resource = value
-        if isinstance(value, RangePartitioning):
-            resource = value._properties
-        elif value is not None:
-            raise ValueError(
-                "Expected value to be RangePartitioning or None, got {}.".format(value)
-            )
-        self._set_sub_prop("rangePartitioning", resource)
-
-    @property
-    def schema(self):
-        """Sequence[Union[ \
-            :class:`~google.cloud.bigquery.schema.SchemaField`, \
-            Mapping[str, Any] \
-        ]]: Schema of the destination table.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.schema
-        """
-        schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"])
-        if schema is None:
-            return
-        return [SchemaField.from_api_repr(field) for field in schema]
-
-    @schema.setter
-    def schema(self, value):
-        if value is None:
-            self._del_sub_prop("schema")
-            return
-
-        value = _to_schema_fields(value)
-
-        _helpers._set_sub_prop(
-            self._properties,
-            ["load", "schema", "fields"],
-            [field.to_api_repr() for field in value],
-        )
-
-    @property
-    def schema_update_options(self):
-        """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies
-        updates to the destination table schema to allow as a side effect of
-        the load job.
-        """
-        return self._get_sub_prop("schemaUpdateOptions")
-
-    @schema_update_options.setter
-    def schema_update_options(self, values):
-        self._set_sub_prop("schemaUpdateOptions", values)
-
-    @property
-    def skip_leading_rows(self):
-        """int: Number of rows to skip when reading data (CSV only).
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.skip_leading_rows
-        """
-        return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows"))
-
-    @skip_leading_rows.setter
-    def skip_leading_rows(self, value):
-        self._set_sub_prop("skipLeadingRows", str(value))
-
-    @property
-    def source_format(self):
-        """google.cloud.bigquery.job.SourceFormat: File format of the data.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_format
-        """
-        return self._get_sub_prop("sourceFormat")
-
-    @source_format.setter
-    def source_format(self, value):
-        self._set_sub_prop("sourceFormat", value)
-
-    @property
-    def time_partitioning(self):
-        """google.cloud.bigquery.table.TimePartitioning: Specifies time-based
-        partitioning for the destination table.
-
-        Only specify at most one of
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`.
-        """
-        prop = self._get_sub_prop("timePartitioning")
-        if prop is not None:
-            prop = TimePartitioning.from_api_repr(prop)
-        return prop
-
-    @time_partitioning.setter
-    def time_partitioning(self, value):
-        api_repr = value
-        if value is not None:
-            api_repr = value.to_api_repr()
-            self._set_sub_prop("timePartitioning", api_repr)
-        else:
-            self._del_sub_prop("timePartitioning")
-
-    @property
-    def use_avro_logical_types(self):
-        """bool: For loads of Avro data, governs whether Avro logical types are
-        converted to their corresponding BigQuery types (e.g. TIMESTAMP) rather than
-        raw types (e.g. INTEGER).
-        """
-        return self._get_sub_prop("useAvroLogicalTypes")
-
-    @use_avro_logical_types.setter
-    def use_avro_logical_types(self, value):
-        self._set_sub_prop("useAvroLogicalTypes", bool(value))
-
-    @property
-    def write_disposition(self):
-        """google.cloud.bigquery.job.WriteDisposition: Action that occurs if
-        the destination table already exists.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.write_disposition
-        """
-        return self._get_sub_prop("writeDisposition")
-
-    @write_disposition.setter
-    def write_disposition(self, value):
-        self._set_sub_prop("writeDisposition", value)
-
-
-class LoadJob(_AsyncJob):
-    """Asynchronous job for loading data into a table.
-
-    Can load from Google Cloud Storage URIs or from a file.
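# A hedged usage sketch of LoadJobConfig / LoadJob; the bucket, dataset, and
# table names below are hypothetical.
from google.cloud import bigquery

client = bigquery.Client()
config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.CSV,
    skip_leading_rows=1,
    autodetect=True,
    write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
)
load_job = client.load_table_from_uri(
    "gs://example-bucket/data.csv",    # hypothetical source URI
    "my-project.my_dataset.my_table",  # hypothetical destination table
    job_config=config,
)
load_job.result()  # waits for the load job to complete
print(load_job.output_rows)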
-
-    Args:
-        job_id (str): the job's ID
-
-        source_uris (Optional[Sequence[str]]):
-            URIs of one or more data files to be loaded. See
-            https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_uris
-            for supported URI formats. Pass None for jobs that load from a file.
-
-        destination (google.cloud.bigquery.table.TableReference):
-            Reference to the table into which data is to be loaded.
-
-        client (google.cloud.bigquery.client.Client):
-            A client which holds credentials and project configuration
-            for the dataset (which requires a project).
-    """
-
-    _JOB_TYPE = "load"
-
-    def __init__(self, job_id, source_uris, destination, client, job_config=None):
-        super(LoadJob, self).__init__(job_id, client)
-
-        if job_config is None:
-            job_config = LoadJobConfig()
-
-        self.source_uris = source_uris
-        self._destination = destination
-        self._configuration = job_config
-
-    @property
-    def destination(self):
-        """google.cloud.bigquery.table.TableReference: table where loaded rows are written
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_table
-        """
-        return self._destination
-
-    @property
-    def allow_jagged_rows(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.allow_jagged_rows`.
-        """
-        return self._configuration.allow_jagged_rows
-
-    @property
-    def allow_quoted_newlines(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.allow_quoted_newlines`.
-        """
-        return self._configuration.allow_quoted_newlines
-
-    @property
-    def autodetect(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.autodetect`.
-        """
-        return self._configuration.autodetect
-
-    @property
-    def create_disposition(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.create_disposition`.
-        """
-        return self._configuration.create_disposition
-
-    @property
-    def encoding(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.encoding`.
-        """
-        return self._configuration.encoding
-
-    @property
-    def field_delimiter(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.field_delimiter`.
-        """
-        return self._configuration.field_delimiter
-
-    @property
-    def ignore_unknown_values(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.ignore_unknown_values`.
-        """
-        return self._configuration.ignore_unknown_values
-
-    @property
-    def max_bad_records(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.max_bad_records`.
-        """
-        return self._configuration.max_bad_records
-
-    @property
-    def null_marker(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.null_marker`.
-        """
-        return self._configuration.null_marker
-
-    @property
-    def quote_character(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.quote_character`.
-        """
-        return self._configuration.quote_character
-
-    @property
-    def skip_leading_rows(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.skip_leading_rows`.
-        """
-        return self._configuration.skip_leading_rows
-
-    @property
-    def source_format(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.source_format`.
-        """
-        return self._configuration.source_format
-
-    @property
-    def write_disposition(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.write_disposition`.
-        """
-        return self._configuration.write_disposition
-
-    @property
-    def schema(self):
-        """See
-        :attr:`google.cloud.bigquery.job.LoadJobConfig.schema`.
- """ - return self._configuration.schema - - @property - def destination_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the destination table. - - Custom encryption configuration (e.g., Cloud KMS keys) - or :data:`None` if using default encryption. - - See - :attr:`google.cloud.bigquery.job.LoadJobConfig.destination_encryption_configuration`. - """ - return self._configuration.destination_encryption_configuration - - @property - def destination_table_description(self): - """Optional[str] name given to destination table. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description - """ - return self._configuration.destination_table_description - - @property - def destination_table_friendly_name(self): - """Optional[str] name given to destination table. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name - """ - return self._configuration.destination_table_friendly_name - - @property - def range_partitioning(self): - """See - :attr:`google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. - """ - return self._configuration.range_partitioning - - @property - def time_partitioning(self): - """See - :attr:`google.cloud.bigquery.job.LoadJobConfig.time_partitioning`. - """ - return self._configuration.time_partitioning - - @property - def use_avro_logical_types(self): - """See - :attr:`google.cloud.bigquery.job.LoadJobConfig.use_avro_logical_types`. - """ - return self._configuration.use_avro_logical_types - - @property - def clustering_fields(self): - """See - :attr:`google.cloud.bigquery.job.LoadJobConfig.clustering_fields`. - """ - return self._configuration.clustering_fields - - @property - def schema_update_options(self): - """See - :attr:`google.cloud.bigquery.job.LoadJobConfig.schema_update_options`. - """ - return self._configuration.schema_update_options - - @property - def input_file_bytes(self): - """Count of bytes loaded from source files. - - Returns: - Optional[int]: the count (None until set from the server). - - Raises: - ValueError: for invalid value types. - """ - return _helpers._int_or_none( - _helpers._get_sub_prop( - self._properties, ["statistics", "load", "inputFileBytes"] - ) - ) - - @property - def input_files(self): - """Count of source files. - - Returns: - Optional[int]: the count (None until set from the server). - """ - return _helpers._int_or_none( - _helpers._get_sub_prop( - self._properties, ["statistics", "load", "inputFiles"] - ) - ) - - @property - def output_bytes(self): - """Count of bytes saved to destination table. - - Returns: - Optional[int]: the count (None until set from the server). - """ - return _helpers._int_or_none( - _helpers._get_sub_prop( - self._properties, ["statistics", "load", "outputBytes"] - ) - ) - - @property - def output_rows(self): - """Count of rows saved to destination table. - - Returns: - Optional[int]: the count (None until set from the server). 
- """ - return _helpers._int_or_none( - _helpers._get_sub_prop( - self._properties, ["statistics", "load", "outputRows"] - ) - ) - - def to_api_repr(self): - """Generate a resource for :meth:`_begin`.""" - configuration = self._configuration.to_api_repr() - if self.source_uris is not None: - _helpers._set_sub_prop( - configuration, ["load", "sourceUris"], self.source_uris - ) - _helpers._set_sub_prop( - configuration, ["load", "destinationTable"], self.destination.to_api_repr() - ) - - return { - "jobReference": self._properties["jobReference"], - "configuration": configuration, - } - - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a job given its API representation - - .. note: - - This method assumes that the project found in the resource matches - the client's project. - - Args: - resource (Dict): dataset job representation returned from the API - - client (google.cloud.bigquery.client.Client): - Client which holds credentials and project - configuration for the dataset. - - Returns: - google.cloud.bigquery.job.LoadJob: Job parsed from ``resource``. - """ - config_resource = resource.get("configuration", {}) - config = LoadJobConfig.from_api_repr(config_resource) - # A load job requires a destination table. - dest_config = config_resource["load"]["destinationTable"] - ds_ref = DatasetReference(dest_config["projectId"], dest_config["datasetId"]) - destination = TableReference(ds_ref, dest_config["tableId"]) - # sourceUris will be absent if this is a file upload. - source_uris = _helpers._get_sub_prop(config_resource, ["load", "sourceUris"]) - job_ref = _JobReference._from_api_repr(resource["jobReference"]) - job = cls(job_ref, source_uris, destination, client, config) - job._set_properties(resource) - return job - - -class CopyJobConfig(_JobConfig): - """Configuration options for copy jobs. - - All properties in this class are optional. Values which are :data:`None` -> - server defaults. Set properties on the constructed configuration by using - the property name as the name of a keyword argument. - """ - - def __init__(self, **kwargs): - super(CopyJobConfig, self).__init__("copy", **kwargs) - - @property - def create_disposition(self): - """google.cloud.bigquery.job.CreateDisposition: Specifies behavior - for creating tables. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.create_disposition - """ - return self._get_sub_prop("createDisposition") - - @create_disposition.setter - def create_disposition(self, value): - self._set_sub_prop("createDisposition", value) - - @property - def write_disposition(self): - """google.cloud.bigquery.job.WriteDisposition: Action that occurs if - the destination table already exists. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.write_disposition - """ - return self._get_sub_prop("writeDisposition") - - @write_disposition.setter - def write_disposition(self, value): - self._set_sub_prop("writeDisposition", value) - - @property - def destination_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the destination table. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. 
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.destination_encryption_configuration
-        """
-        prop = self._get_sub_prop("destinationEncryptionConfiguration")
-        if prop is not None:
-            prop = EncryptionConfiguration.from_api_repr(prop)
-        return prop
-
-    @destination_encryption_configuration.setter
-    def destination_encryption_configuration(self, value):
-        api_repr = value
-        if value is not None:
-            api_repr = value.to_api_repr()
-        self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
-
-
-class CopyJob(_AsyncJob):
-    """Asynchronous job: copy data into a table from other tables.
-
-    Args:
-        job_id (str): the job's ID, within the project belonging to ``client``.
-
-        sources (List[google.cloud.bigquery.table.TableReference]): Tables from which data is to be copied.
-
-        destination (google.cloud.bigquery.table.TableReference): Table into which data is to be copied.
-
-        client (google.cloud.bigquery.client.Client):
-            A client which holds credentials and project configuration
-            for the dataset (which requires a project).
-
-        job_config (google.cloud.bigquery.job.CopyJobConfig):
-            (Optional) Extra configuration options for the copy job.
-    """
-
-    _JOB_TYPE = "copy"
-
-    def __init__(self, job_id, sources, destination, client, job_config=None):
-        super(CopyJob, self).__init__(job_id, client)
-
-        if job_config is None:
-            job_config = CopyJobConfig()
-
-        self.destination = destination
-        self.sources = sources
-        self._configuration = job_config
-
-    @property
-    def create_disposition(self):
-        """See
-        :attr:`google.cloud.bigquery.job.CopyJobConfig.create_disposition`.
-        """
-        return self._configuration.create_disposition
-
-    @property
-    def write_disposition(self):
-        """See
-        :attr:`google.cloud.bigquery.job.CopyJobConfig.write_disposition`.
-        """
-        return self._configuration.write_disposition
-
-    @property
-    def destination_encryption_configuration(self):
-        """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom
-        encryption configuration for the destination table.
-
-        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
-        if using default encryption.
-
-        See
-        :attr:`google.cloud.bigquery.job.CopyJobConfig.destination_encryption_configuration`.
-        """
-        return self._configuration.destination_encryption_configuration
-
-    def to_api_repr(self):
-        """Generate a resource for :meth:`_begin`."""
-
-        source_refs = [
-            {
-                "projectId": table.project,
-                "datasetId": table.dataset_id,
-                "tableId": table.table_id,
-            }
-            for table in self.sources
-        ]
-
-        configuration = self._configuration.to_api_repr()
-        _helpers._set_sub_prop(configuration, ["copy", "sourceTables"], source_refs)
-        _helpers._set_sub_prop(
-            configuration,
-            ["copy", "destinationTable"],
-            {
-                "projectId": self.destination.project,
-                "datasetId": self.destination.dataset_id,
-                "tableId": self.destination.table_id,
-            },
-        )
-
-        return {
-            "jobReference": self._properties["jobReference"],
-            "configuration": configuration,
-        }
-
-    def _copy_configuration_properties(self, configuration):
-        """Helper: assign subclass configuration properties in cleaned."""
-        self._configuration._properties = copy.deepcopy(configuration)
-
-    @classmethod
-    def from_api_repr(cls, resource, client):
-        """Factory: construct a job given its API representation
-
-        .. note::
-
-            This method assumes that the project found in the resource matches
-            the client's project.
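# A hedged usage sketch of CopyJobConfig / CopyJob; the table IDs below are
# hypothetical.
from google.cloud import bigquery

client = bigquery.Client()
config = bigquery.CopyJobConfig(
    write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE,
)
copy_job = client.copy_table(
    "my-project.my_dataset.source_table",       # hypothetical source
    "my-project.my_dataset.destination_table",  # hypothetical destination
    job_config=config,
)
copy_job.result()  # waits for the copy job to complete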
-
-        Args:
-            resource (Dict): dataset job representation returned from the API
-
-            client (google.cloud.bigquery.client.Client):
-                Client which holds credentials and project
-                configuration for the dataset.
-
-        Returns:
-            google.cloud.bigquery.job.CopyJob: Job parsed from ``resource``.
-        """
-        job_id, config_resource = cls._get_resource_config(resource)
-        config = CopyJobConfig.from_api_repr(config_resource)
-        # Copy required fields to the job.
-        copy_resource = config_resource["copy"]
-        destination = TableReference.from_api_repr(copy_resource["destinationTable"])
-        sources = []
-        source_configs = copy_resource.get("sourceTables")
-        if source_configs is None:
-            single = copy_resource.get("sourceTable")
-            if single is None:
-                raise KeyError("Resource missing 'sourceTables' / 'sourceTable'")
-            source_configs = [single]
-        for source_config in source_configs:
-            table_ref = TableReference.from_api_repr(source_config)
-            sources.append(table_ref)
-        job = cls(job_id, sources, destination, client=client, job_config=config)
-        job._set_properties(resource)
-        return job
-
-
-class ExtractJobConfig(_JobConfig):
-    """Configuration options for extract jobs.
-
-    All properties in this class are optional. Properties which are
-    :data:`None` use the server defaults. Set properties on the constructed
-    configuration by using the property name as the name of a keyword argument.
-    """
-
-    def __init__(self, **kwargs):
-        super(ExtractJobConfig, self).__init__("extract", **kwargs)
-
-    @property
-    def compression(self):
-        """google.cloud.bigquery.job.Compression: Compression type to use for
-        exported files.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.compression
-        """
-        return self._get_sub_prop("compression")
-
-    @compression.setter
-    def compression(self, value):
-        self._set_sub_prop("compression", value)
-
-    @property
-    def destination_format(self):
-        """google.cloud.bigquery.job.DestinationFormat: Exported file format.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.destination_format
-        """
-        return self._get_sub_prop("destinationFormat")
-
-    @destination_format.setter
-    def destination_format(self, value):
-        self._set_sub_prop("destinationFormat", value)
-
-    @property
-    def field_delimiter(self):
-        """str: Delimiter to use between fields in the exported data.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.field_delimiter
-        """
-        return self._get_sub_prop("fieldDelimiter")
-
-    @field_delimiter.setter
-    def field_delimiter(self, value):
-        self._set_sub_prop("fieldDelimiter", value)
-
-    @property
-    def print_header(self):
-        """bool: Print a header row in the exported data.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.print_header
-        """
-        return self._get_sub_prop("printHeader")
-
-    @print_header.setter
-    def print_header(self, value):
-        self._set_sub_prop("printHeader", value)
-
-    @property
-    def use_avro_logical_types(self):
-        """bool: For extracts of Avro data, governs whether Avro logical types
-        are used for their corresponding BigQuery types (e.g. TIMESTAMP) rather
-        than raw types (e.g. INTEGER).
-        """
-        return self._get_sub_prop("useAvroLogicalTypes")
-
-    @use_avro_logical_types.setter
-    def use_avro_logical_types(self, value):
-        self._set_sub_prop("useAvroLogicalTypes", bool(value))
-
-
-class ExtractJob(_AsyncJob):
-    """Asynchronous job: extract data from a table into Cloud Storage.
-
-    Args:
-        job_id (str): the job's ID.
-
-        source (google.cloud.bigquery.table.TableReference):
-            Table from which data is to be extracted.
-
-        destination_uris (List[str]):
-            URIs describing where the extracted data will be written in Cloud
-            Storage, using the format ``gs://<bucket_name>/<object_name_or_glob>``.
-
-        client (google.cloud.bigquery.client.Client):
-            A client which holds credentials and project configuration.
-
-        job_config (google.cloud.bigquery.job.ExtractJobConfig):
-            (Optional) Extra configuration options for the extract job.
-    """
-
-    _JOB_TYPE = "extract"
-
-    def __init__(self, job_id, source, destination_uris, client, job_config=None):
-        super(ExtractJob, self).__init__(job_id, client)
-
-        if job_config is None:
-            job_config = ExtractJobConfig()
-
-        self.source = source
-        self.destination_uris = destination_uris
-        self._configuration = job_config
-
-    @property
-    def compression(self):
-        """See
-        :attr:`google.cloud.bigquery.job.ExtractJobConfig.compression`.
-        """
-        return self._configuration.compression
-
-    @property
-    def destination_format(self):
-        """See
-        :attr:`google.cloud.bigquery.job.ExtractJobConfig.destination_format`.
-        """
-        return self._configuration.destination_format
-
-    @property
-    def field_delimiter(self):
-        """See
-        :attr:`google.cloud.bigquery.job.ExtractJobConfig.field_delimiter`.
-        """
-        return self._configuration.field_delimiter
-
-    @property
-    def print_header(self):
-        """See
-        :attr:`google.cloud.bigquery.job.ExtractJobConfig.print_header`.
-        """
-        return self._configuration.print_header
-
-    @property
-    def destination_uri_file_counts(self):
-        """Return file counts from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics4.FIELDS.destination_uri_file_counts
-
-        Returns:
-            List[int]:
-                A list of integer counts, each representing the number of files
-                per destination URI or URI pattern specified in the extract
-                configuration. These values will be in the same order as the URIs
-                specified in the 'destinationUris' field. Returns None if job is
-                not yet complete.
-        """
-        counts = self._job_statistics().get("destinationUriFileCounts")
-        if counts is not None:
-            return [int(count) for count in counts]
-        return None
-
-    def to_api_repr(self):
-        """Generate a resource for :meth:`_begin`."""
-
-        source_ref = {
-            "projectId": self.source.project,
-            "datasetId": self.source.dataset_id,
-            "tableId": self.source.table_id,
-        }
-
-        configuration = self._configuration.to_api_repr()
-        _helpers._set_sub_prop(configuration, ["extract", "sourceTable"], source_ref)
-        _helpers._set_sub_prop(
-            configuration, ["extract", "destinationUris"], self.destination_uris
-        )
-
-        return {
-            "jobReference": self._properties["jobReference"],
-            "configuration": configuration,
-        }
-
-    def _copy_configuration_properties(self, configuration):
-        """Helper: assign subclass configuration properties in cleaned."""
-        self._configuration._properties = copy.deepcopy(configuration)
-
-    @classmethod
-    def from_api_repr(cls, resource, client):
-        """Factory: construct a job given its API representation
-
-        .. note::
-
-            This method assumes that the project found in the resource matches
-            the client's project.
-
-        Args:
-            resource (Dict): dataset job representation returned from the API
-
-            client (google.cloud.bigquery.client.Client):
-                Client which holds credentials and project
-                configuration for the dataset.
-
-        Returns:
-            google.cloud.bigquery.job.ExtractJob: Job parsed from ``resource``.
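# A hedged usage sketch of ExtractJobConfig / ExtractJob; the table ID and
# bucket below are hypothetical.
from google.cloud import bigquery

client = bigquery.Client()
config = bigquery.ExtractJobConfig(
    destination_format=bigquery.DestinationFormat.CSV,
    print_header=True,
)
extract_job = client.extract_table(
    "my-project.my_dataset.my_table",    # hypothetical source table
    "gs://example-bucket/export-*.csv",  # hypothetical destination URI pattern
    job_config=config,
)
extract_job.result()  # waits for the extract job to complete
print(extract_job.destination_uri_file_counts)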
- """ - job_id, config_resource = cls._get_resource_config(resource) - config = ExtractJobConfig.from_api_repr(config_resource) - source_config = _helpers._get_sub_prop( - config_resource, ["extract", "sourceTable"] - ) - dataset = DatasetReference( - source_config["projectId"], source_config["datasetId"] - ) - source = dataset.table(source_config["tableId"]) - destination_uris = _helpers._get_sub_prop( - config_resource, ["extract", "destinationUris"] - ) - - job = cls(job_id, source, destination_uris, client=client, job_config=config) - job._set_properties(resource) - return job - - -def _from_api_repr_query_parameters(resource): - return [_query_param_from_api_repr(mapping) for mapping in resource] - - -def _to_api_repr_query_parameters(value): - return [query_parameter.to_api_repr() for query_parameter in value] - - -def _from_api_repr_udf_resources(resource): - udf_resources = [] - for udf_mapping in resource: - for udf_type, udf_value in udf_mapping.items(): - udf_resources.append(UDFResource(udf_type, udf_value)) - return udf_resources - - -def _to_api_repr_udf_resources(value): - return [{udf_resource.udf_type: udf_resource.value} for udf_resource in value] - - -def _from_api_repr_table_defs(resource): - return {k: ExternalConfig.from_api_repr(v) for k, v in resource.items()} - - -def _to_api_repr_table_defs(value): - return {k: ExternalConfig.to_api_repr(v) for k, v in value.items()} - - -class QueryJobConfig(_JobConfig): - """Configuration options for query jobs. - - All properties in this class are optional. Values which are :data:`None` -> - server defaults. Set properties on the constructed configuration by using - the property name as the name of a keyword argument. - """ - - def __init__(self, **kwargs): - super(QueryJobConfig, self).__init__("query", **kwargs) - - @property - def destination_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the destination table. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_encryption_configuration - """ - prop = self._get_sub_prop("destinationEncryptionConfiguration") - if prop is not None: - prop = EncryptionConfiguration.from_api_repr(prop) - return prop - - @destination_encryption_configuration.setter - def destination_encryption_configuration(self, value): - api_repr = value - if value is not None: - api_repr = value.to_api_repr() - self._set_sub_prop("destinationEncryptionConfiguration", api_repr) - - @property - def allow_large_results(self): - """bool: Allow large query results tables (legacy SQL, only) - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.allow_large_results - """ - return self._get_sub_prop("allowLargeResults") - - @allow_large_results.setter - def allow_large_results(self, value): - self._set_sub_prop("allowLargeResults", value) - - @property - def create_disposition(self): - """google.cloud.bigquery.job.CreateDisposition: Specifies behavior - for creating tables. 
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.create_disposition
-        """
-        return self._get_sub_prop("createDisposition")
-
-    @create_disposition.setter
-    def create_disposition(self, value):
-        self._set_sub_prop("createDisposition", value)
-
-    @property
-    def default_dataset(self):
-        """google.cloud.bigquery.dataset.DatasetReference: the default dataset
-        to use for unqualified table names in the query or :data:`None` if not
-        set.
-
-        The ``default_dataset`` setter accepts:
-
-        - a :class:`~google.cloud.bigquery.dataset.Dataset`, or
-        - a :class:`~google.cloud.bigquery.dataset.DatasetReference`, or
-        - a :class:`str` of the fully-qualified dataset ID in standard SQL
-          format. The value must include a project ID and dataset ID
-          separated by ``.``. For example: ``your-project.your_dataset``.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.default_dataset
-        """
-        prop = self._get_sub_prop("defaultDataset")
-        if prop is not None:
-            prop = DatasetReference.from_api_repr(prop)
-        return prop
-
-    @default_dataset.setter
-    def default_dataset(self, value):
-        if value is None:
-            self._set_sub_prop("defaultDataset", None)
-            return
-
-        if isinstance(value, six.string_types):
-            value = DatasetReference.from_string(value)
-
-        if isinstance(value, (Dataset, DatasetListItem)):
-            value = value.reference
-
-        resource = value.to_api_repr()
-        self._set_sub_prop("defaultDataset", resource)
-
-    @property
-    def destination(self):
-        """google.cloud.bigquery.table.TableReference: table where results are
-        written or :data:`None` if not set.
-
-        The ``destination`` setter accepts:
-
-        - a :class:`~google.cloud.bigquery.table.Table`, or
-        - a :class:`~google.cloud.bigquery.table.TableReference`, or
-        - a :class:`str` of the fully-qualified table ID in standard SQL
-          format. The value must include a project ID, dataset ID, and table
-          ID, each separated by ``.``. For example:
-          ``your-project.your_dataset.your_table``.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_table
-        """
-        prop = self._get_sub_prop("destinationTable")
-        if prop is not None:
-            prop = TableReference.from_api_repr(prop)
-        return prop
-
-    @destination.setter
-    def destination(self, value):
-        if value is None:
-            self._set_sub_prop("destinationTable", None)
-            return
-
-        value = _table_arg_to_table_ref(value)
-        resource = value.to_api_repr()
-        self._set_sub_prop("destinationTable", resource)
-
-    @property
-    def dry_run(self):
-        """bool: :data:`True` if this query should be a dry run to estimate
-        costs.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfiguration.FIELDS.dry_run
-        """
-        return self._properties.get("dryRun")
-
-    @dry_run.setter
-    def dry_run(self, value):
-        self._properties["dryRun"] = value
-
-    @property
-    def flatten_results(self):
-        """bool: Flatten nested/repeated fields in results. (Legacy SQL only)
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.flatten_results
-        """
-        return self._get_sub_prop("flattenResults")
-
-    @flatten_results.setter
-    def flatten_results(self, value):
-        self._set_sub_prop("flattenResults", value)
-
-    @property
-    def maximum_billing_tier(self):
-        """int: Deprecated. Changes the billing tier to allow high-compute
-        queries.
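# A hedged sketch of the setter forms described above; the project, dataset,
# and table IDs below are hypothetical.
from google.cloud import bigquery

client = bigquery.Client()
config = bigquery.QueryJobConfig(
    default_dataset="my-project.my_dataset",      # str parsed via DatasetReference.from_string
    destination="my-project.my_dataset.results",  # str parsed into a TableReference
    write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE,
)
query_job = client.query("SELECT 17 AS answer", job_config=config)
rows = query_job.result()  # waits for the query to finish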
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_billing_tier - """ - return self._get_sub_prop("maximumBillingTier") - - @maximum_billing_tier.setter - def maximum_billing_tier(self, value): - self._set_sub_prop("maximumBillingTier", value) - - @property - def maximum_bytes_billed(self): - """int: Maximum bytes to be billed for this job or :data:`None` if not set. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_bytes_billed - """ - return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled")) - - @maximum_bytes_billed.setter - def maximum_bytes_billed(self, value): - self._set_sub_prop("maximumBytesBilled", str(value)) - - @property - def priority(self): - """google.cloud.bigquery.job.QueryPriority: Priority of the query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.priority - """ - return self._get_sub_prop("priority") - - @priority.setter - def priority(self, value): - self._set_sub_prop("priority", value) - - @property - def query_parameters(self): - """List[Union[google.cloud.bigquery.query.ArrayQueryParameter, \ - google.cloud.bigquery.query.ScalarQueryParameter, \ - google.cloud.bigquery.query.StructQueryParameter]]: list of parameters - for parameterized query (empty by default) - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query_parameters - """ - prop = self._get_sub_prop("queryParameters", default=[]) - return _from_api_repr_query_parameters(prop) - - @query_parameters.setter - def query_parameters(self, values): - self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values)) - - @property - def range_partitioning(self): - """Optional[google.cloud.bigquery.table.RangePartitioning]: - Configures range-based partitioning for destination table. - - .. note:: - **Beta**. The integer range partitioning feature is in a - pre-release state and might change or have limited support. - - Only specify at most one of - :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or - :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. - - Raises: - ValueError: - If the value is not - :class:`~google.cloud.bigquery.table.RangePartitioning` or - :data:`None`. - """ - resource = self._get_sub_prop("rangePartitioning") - if resource is not None: - return RangePartitioning(_properties=resource) - - @range_partitioning.setter - def range_partitioning(self, value): - resource = value - if isinstance(value, RangePartitioning): - resource = value._properties - elif value is not None: - raise ValueError( - "Expected value to be RangePartitioning or None, got {}.".format(value) - ) - self._set_sub_prop("rangePartitioning", resource) - - @property - def udf_resources(self): - """List[google.cloud.bigquery.query.UDFResource]: user - defined function resources (empty by default) - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.user_defined_function_resources - """ - prop = self._get_sub_prop("userDefinedFunctionResources", default=[]) - return _from_api_repr_udf_resources(prop) - - @udf_resources.setter - def udf_resources(self, values): - self._set_sub_prop( - "userDefinedFunctionResources", _to_api_repr_udf_resources(values) - ) - - @property - def use_legacy_sql(self): - """bool: Use legacy SQL syntax. 
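# Sketch: attaching a named query parameter and a cost cap via the
# `query_parameters` and `maximum_bytes_billed` properties above. The
# parameter name and byte limit are placeholders.
from google.cloud import bigquery

config = bigquery.QueryJobConfig()
config.query_parameters = [bigquery.ScalarQueryParameter("min_count", "INT64", 250)]
config.maximum_bytes_billed = 10 * 1024 ** 3  # 10 GiB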
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_legacy_sql
-        """
-        return self._get_sub_prop("useLegacySql")
-
-    @use_legacy_sql.setter
-    def use_legacy_sql(self, value):
-        self._set_sub_prop("useLegacySql", value)
-
-    @property
-    def use_query_cache(self):
-        """bool: Look for the query result in the cache.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_query_cache
-        """
-        return self._get_sub_prop("useQueryCache")
-
-    @use_query_cache.setter
-    def use_query_cache(self, value):
-        self._set_sub_prop("useQueryCache", value)
-
-    @property
-    def write_disposition(self):
-        """google.cloud.bigquery.job.WriteDisposition: Action that occurs if
-        the destination table already exists.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.write_disposition
-        """
-        return self._get_sub_prop("writeDisposition")
-
-    @write_disposition.setter
-    def write_disposition(self, value):
-        self._set_sub_prop("writeDisposition", value)
-
-    @property
-    def table_definitions(self):
-        """Dict[str, google.cloud.bigquery.external_config.ExternalConfig]:
-        Definitions for external tables or :data:`None` if not set.
-
-        See
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.external_table_definitions
-        """
-        prop = self._get_sub_prop("tableDefinitions")
-        if prop is not None:
-            prop = _from_api_repr_table_defs(prop)
-        return prop
-
-    @table_definitions.setter
-    def table_definitions(self, values):
-        self._set_sub_prop("tableDefinitions", _to_api_repr_table_defs(values))
-
-    @property
-    def time_partitioning(self):
-        """Optional[google.cloud.bigquery.table.TimePartitioning]: Specifies
-        time-based partitioning for the destination table.
-
-        Only specify at most one of
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or
-        :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`.
-
-        Raises:
-            ValueError:
-                If the value is not
-                :class:`~google.cloud.bigquery.table.TimePartitioning` or
-                :data:`None`.
-        """
-        prop = self._get_sub_prop("timePartitioning")
-        if prop is not None:
-            prop = TimePartitioning.from_api_repr(prop)
-        return prop
-
-    @time_partitioning.setter
-    def time_partitioning(self, value):
-        api_repr = value
-        if value is not None:
-            api_repr = value.to_api_repr()
-        self._set_sub_prop("timePartitioning", api_repr)
-
-    @property
-    def clustering_fields(self):
-        """Optional[List[str]]: Fields defining clustering for the table
-
-        (Defaults to :data:`None`).
-
-        Clustering fields are immutable after table creation.
-
-        .. note::
-
-           As of 2018-06-29, clustering fields cannot be set on a table
-           which does not also have time partitioning defined.
-        """
-        prop = self._get_sub_prop("clustering")
-        if prop is not None:
-            return list(prop.get("fields", ()))
-
-    @clustering_fields.setter
-    def clustering_fields(self, value):
-        """Optional[List[str]]: Fields defining clustering for the table
-
-        (Defaults to :data:`None`).
-        """
-        if value is not None:
-            self._set_sub_prop("clustering", {"fields": value})
-        else:
-            self._del_sub_prop("clustering")
-
-    @property
-    def schema_update_options(self):
-        """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies
-        updates to the destination table schema to allow as a side effect of
-        the query job.
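# Sketch: combining time-based partitioning with clustering on the
# destination table, per the `time_partitioning` and `clustering_fields`
# properties above. The column names are placeholders.
from google.cloud import bigquery

config = bigquery.QueryJobConfig()
config.time_partitioning = bigquery.TimePartitioning(field="transaction_date")
config.clustering_fields = ["customer_id", "region"]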
- """ - return self._get_sub_prop("schemaUpdateOptions") - - @schema_update_options.setter - def schema_update_options(self, values): - self._set_sub_prop("schemaUpdateOptions", values) - - def to_api_repr(self): - """Build an API representation of the query job config. - - Returns: - Dict: A dictionary in the format used by the BigQuery API. - """ - resource = copy.deepcopy(self._properties) - - # Query parameters have an addition property associated with them - # to indicate if the query is using named or positional parameters. - query_parameters = resource["query"].get("queryParameters") - if query_parameters: - if query_parameters[0].get("name") is None: - resource["query"]["parameterMode"] = "POSITIONAL" - else: - resource["query"]["parameterMode"] = "NAMED" - - return resource - - -class QueryJob(_AsyncJob): - """Asynchronous job: query tables. - - Args: - job_id (str): the job's ID, within the project belonging to ``client``. - - query (str): SQL query string. - - client (google.cloud.bigquery.client.Client): - A client which holds credentials and project configuration - for the dataset (which requires a project). - - job_config (google.cloud.bigquery.job.QueryJobConfig): - (Optional) Extra configuration options for the query job. - """ - - _JOB_TYPE = "query" - _UDF_KEY = "userDefinedFunctionResources" - - def __init__(self, job_id, query, client, job_config=None): - super(QueryJob, self).__init__(job_id, client) - - if job_config is None: - job_config = QueryJobConfig() - if job_config.use_legacy_sql is None: - job_config.use_legacy_sql = False - - _helpers._set_sub_prop( - self._properties, ["configuration", "query", "query"], query - ) - - self._configuration = job_config - self._query_results = None - self._done_timeout = None - - @property - def allow_large_results(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.allow_large_results`. - """ - return self._configuration.allow_large_results - - @property - def create_disposition(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.create_disposition`. - """ - return self._configuration.create_disposition - - @property - def default_dataset(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.default_dataset`. - """ - return self._configuration.default_dataset - - @property - def destination(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.destination`. - """ - return self._configuration.destination - - @property - def destination_encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the destination table. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. - - See - :attr:`google.cloud.bigquery.job.QueryJobConfig.destination_encryption_configuration`. - """ - return self._configuration.destination_encryption_configuration - - @property - def dry_run(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.dry_run`. - """ - return self._configuration.dry_run - - @property - def flatten_results(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.flatten_results`. - """ - return self._configuration.flatten_results - - @property - def priority(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.priority`. - """ - return self._configuration.priority - - @property - def query(self): - """str: The query text used in this query job. 
- - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query - """ - return _helpers._get_sub_prop( - self._properties, ["configuration", "query", "query"] - ) - - @property - def query_parameters(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.query_parameters`. - """ - return self._configuration.query_parameters - - @property - def udf_resources(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.udf_resources`. - """ - return self._configuration.udf_resources - - @property - def use_legacy_sql(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.use_legacy_sql`. - """ - return self._configuration.use_legacy_sql - - @property - def use_query_cache(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.use_query_cache`. - """ - return self._configuration.use_query_cache - - @property - def write_disposition(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.write_disposition`. - """ - return self._configuration.write_disposition - - @property - def maximum_billing_tier(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.maximum_billing_tier`. - """ - return self._configuration.maximum_billing_tier - - @property - def maximum_bytes_billed(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.maximum_bytes_billed`. - """ - return self._configuration.maximum_bytes_billed - - @property - def range_partitioning(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.range_partitioning`. - """ - return self._configuration.range_partitioning - - @property - def table_definitions(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.table_definitions`. - """ - return self._configuration.table_definitions - - @property - def time_partitioning(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.time_partitioning`. - """ - return self._configuration.time_partitioning - - @property - def clustering_fields(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.clustering_fields`. - """ - return self._configuration.clustering_fields - - @property - def schema_update_options(self): - """See - :attr:`google.cloud.bigquery.job.QueryJobConfig.schema_update_options`. - """ - return self._configuration.schema_update_options - - def to_api_repr(self): - """Generate a resource for :meth:`_begin`.""" - configuration = self._configuration.to_api_repr() - - resource = { - "jobReference": self._properties["jobReference"], - "configuration": configuration, - } - configuration["query"]["query"] = self.query - - return resource - - def _copy_configuration_properties(self, configuration): - """Helper: assign subclass configuration properties in cleaned.""" - self._configuration._properties = copy.deepcopy(configuration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a job given its API representation - - Args: - resource (Dict): dataset job representation returned from the API - - client (google.cloud.bigquery.client.Client): - Client which holds credentials and project - configuration for the dataset. - - Returns: - google.cloud.bigquery.job.QueryJob: Job parsed from ``resource``. - """ - job_id, config = cls._get_resource_config(resource) - query = _helpers._get_sub_prop(config, ["query", "query"]) - job = cls(job_id, query, client=client) - job._set_properties(resource) - return job - - @property - def query_plan(self): - """Return query plan from job statistics, if present. 
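# Sketch: a QueryJob is normally created via Client.query() rather than
# directly; the read-only properties above delegate to the attached
# QueryJobConfig. Assumes Application Default Credentials are configured.
from google.cloud import bigquery

client = bigquery.Client()
job = client.query("SELECT 1")
print(job.job_id, job.use_legacy_sql, job.priority)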
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.query_plan
-
-        Returns:
-            List[QueryPlanEntry]:
-                mappings describing the query plan, or an empty list
-                if the query has not yet completed.
-        """
-        plan_entries = self._job_statistics().get("queryPlan", ())
-        return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries]
-
-    @property
-    def timeline(self):
-        """List(TimelineEntry): Return the query execution timeline
-        from job statistics.
-        """
-        raw = self._job_statistics().get("timeline", ())
-        return [TimelineEntry.from_api_repr(entry) for entry in raw]
-
-    @property
-    def total_bytes_processed(self):
-        """Return total bytes processed from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_processed
-
-        Returns:
-            Optional[int]:
-                Total bytes processed by the job, or None if job is not
-                yet complete.
-        """
-        result = self._job_statistics().get("totalBytesProcessed")
-        if result is not None:
-            result = int(result)
-        return result
-
-    @property
-    def total_bytes_billed(self):
-        """Return total bytes billed from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_billed
-
-        Returns:
-            Optional[int]:
-                Total bytes billed for the job, or None if job is not
-                yet complete.
-        """
-        result = self._job_statistics().get("totalBytesBilled")
-        if result is not None:
-            result = int(result)
-        return result
-
-    @property
-    def billing_tier(self):
-        """Return billing tier from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.billing_tier
-
-        Returns:
-            Optional[int]:
-                Billing tier used by the job, or None if job is not
-                yet complete.
-        """
-        return self._job_statistics().get("billingTier")
-
-    @property
-    def cache_hit(self):
-        """Return whether or not query results were served from cache.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.cache_hit
-
-        Returns:
-            Optional[bool]:
-                whether the query results were returned from cache, or None
-                if job is not yet complete.
-        """
-        return self._job_statistics().get("cacheHit")
-
-    @property
-    def ddl_operation_performed(self):
-        """Optional[str]: Return the DDL operation performed.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_operation_performed
-
-        """
-        return self._job_statistics().get("ddlOperationPerformed")
-
-    @property
-    def ddl_target_routine(self):
-        """Optional[google.cloud.bigquery.routine.RoutineReference]: Return the DDL target routine, present
-        for CREATE/DROP FUNCTION/PROCEDURE queries.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_routine
-        """
-        prop = self._job_statistics().get("ddlTargetRoutine")
-        if prop is not None:
-            prop = RoutineReference.from_api_repr(prop)
-        return prop
-
-    @property
-    def ddl_target_table(self):
-        """Optional[google.cloud.bigquery.table.TableReference]: Return the DDL target table, present
-        for CREATE/DROP TABLE/VIEW queries.
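# Sketch: the statistics properties above are filled in server-side, so wait
# for completion before reading them. The public dataset is only an example.
from google.cloud import bigquery

client = bigquery.Client()
job = client.query("SELECT COUNT(*) FROM `bigquery-public-data.samples.shakespeare`")
job.result()  # block until the job finishes
print(job.total_bytes_processed, job.total_bytes_billed, job.cache_hit)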
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_table
-        """
-        prop = self._job_statistics().get("ddlTargetTable")
-        if prop is not None:
-            prop = TableReference.from_api_repr(prop)
-        return prop
-
-    @property
-    def num_dml_affected_rows(self):
-        """Return the number of DML rows affected by the job.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.num_dml_affected_rows
-
-        Returns:
-            Optional[int]:
-                number of DML rows affected by the job, or None if job is not
-                yet complete.
-        """
-        result = self._job_statistics().get("numDmlAffectedRows")
-        if result is not None:
-            result = int(result)
-        return result
-
-    @property
-    def slot_millis(self):
-        """Union[int, None]: Slot-milliseconds used by this query job."""
-        return _helpers._int_or_none(self._job_statistics().get("totalSlotMs"))
-
-    @property
-    def statement_type(self):
-        """Return statement type from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.statement_type
-
-        Returns:
-            Optional[str]:
-                type of statement used by the job, or None if job is not
-                yet complete.
-        """
-        return self._job_statistics().get("statementType")
-
-    @property
-    def referenced_tables(self):
-        """Return referenced tables from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.referenced_tables
-
-        Returns:
-            List[TableReference]:
-                references to tables used by the query, or an empty list
-                if the query has not yet completed.
-        """
-        tables = []
-        datasets_by_project_name = {}
-
-        for table in self._job_statistics().get("referencedTables", ()):
-
-            t_project = table["projectId"]
-
-            ds_id = table["datasetId"]
-            t_dataset = datasets_by_project_name.get((t_project, ds_id))
-            if t_dataset is None:
-                t_dataset = DatasetReference(t_project, ds_id)
-                datasets_by_project_name[(t_project, ds_id)] = t_dataset
-
-            t_name = table["tableId"]
-            tables.append(t_dataset.table(t_name))
-
-        return tables
-
-    @property
-    def undeclared_query_parameters(self):
-        """Return undeclared query parameters from job statistics, if present.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.undeclared_query_parameters
-
-        Returns:
-            List[Union[ \
-                google.cloud.bigquery.query.ArrayQueryParameter, \
-                google.cloud.bigquery.query.ScalarQueryParameter, \
-                google.cloud.bigquery.query.StructQueryParameter \
-            ]]:
-                Undeclared parameters, or an empty list if the query has
-                not yet completed.
-        """
-        parameters = []
-        undeclared = self._job_statistics().get("undeclaredQueryParameters", ())
-
-        for parameter in undeclared:
-            p_type = parameter["parameterType"]
-
-            if "arrayType" in p_type:
-                klass = ArrayQueryParameter
-            elif "structTypes" in p_type:
-                klass = StructQueryParameter
-            else:
-                klass = ScalarQueryParameter
-
-            parameters.append(klass.from_api_repr(parameter))
-
-        return parameters
-
-    @property
-    def estimated_bytes_processed(self):
-        """Return the estimated number of bytes processed by the query.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.estimated_bytes_processed
-
-        Returns:
-            Optional[int]:
-                number of bytes estimated to be processed by the query,
-                or None if job is not yet complete.
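# Sketch: a dry run surfaces the processing estimate without executing the
# query, a cheap way to read the statistics above. Dataset name is an example.
from google.cloud import bigquery

client = bigquery.Client()
dry_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False)
dry_job = client.query(
    "SELECT word FROM `bigquery-public-data.samples.shakespeare`",
    job_config=dry_config,
)
print("Estimated bytes:", dry_job.total_bytes_processed)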
- """ - result = self._job_statistics().get("estimatedBytesProcessed") - if result is not None: - result = int(result) - return result - - def done(self, retry=DEFAULT_RETRY, timeout=None): - """Refresh the job and checks if it is complete. - - Args: - retry (Optional[google.api_core.retry.Retry]): - How to retry the call that retrieves query results. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Returns: - bool: True if the job is complete, False otherwise. - """ - # Since the API to getQueryResults can hang up to the timeout value - # (default of 10 seconds), set the timeout parameter to ensure that - # the timeout from the futures API is respected. See: - # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4135 - timeout_ms = None - if self._done_timeout is not None: - # Subtract a buffer for context switching, network latency, etc. - api_timeout = self._done_timeout - _TIMEOUT_BUFFER_SECS - api_timeout = max(min(api_timeout, 10), 0) - self._done_timeout -= api_timeout - self._done_timeout = max(0, self._done_timeout) - timeout_ms = int(api_timeout * 1000) - - # If the server-side processing timeout (timeout_ms) is specified and - # would be picked as the total request timeout, we want to add a small - # margin to it - we don't want to timeout the connection just as the - # server-side processing might have completed, but instead slightly - # after the server-side deadline. - # However, if `timeout` is specified, and is shorter than the adjusted - # server timeout, the former prevails. - if timeout_ms is not None and timeout_ms > 0: - server_timeout_with_margin = timeout_ms / 1000 + _SERVER_TIMEOUT_MARGIN_SECS - if timeout is not None: - timeout = min(server_timeout_with_margin, timeout) - else: - timeout = server_timeout_with_margin - - # Do not refresh if the state is already done, as the job will not - # change once complete. - if self.state != _DONE_STATE: - self._query_results = self._client._get_query_results( - self.job_id, - retry, - project=self.project, - timeout_ms=timeout_ms, - location=self.location, - timeout=timeout, - ) - - # Only reload the job once we know the query is complete. - # This will ensure that fields such as the destination table are - # correctly populated. - if self._query_results.complete: - self.reload(retry=retry, timeout=timeout) - - return self.state == _DONE_STATE - - def _blocking_poll(self, timeout=None): - self._done_timeout = timeout - super(QueryJob, self)._blocking_poll(timeout=timeout) - - @staticmethod - def _format_for_exception(query, job_id): - """Format a query for the output in exception message. - - Args: - query (str): The SQL query to format. - job_id (str): The ID of the job that ran the query. - - Returns: - str: A formatted query text. - """ - template = "\n\n(job ID: {job_id})\n\n{header}\n\n{ruler}\n{body}\n{ruler}" - - lines = query.splitlines() - max_line_len = max(len(l) for l in lines) - - header = "-----Query Job SQL Follows-----" - header = "{:^{total_width}}".format(header, total_width=max_line_len + 5) - - # Print out a "ruler" above and below the SQL so we can judge columns. - # Left pad for the line numbers (4 digits plus ":"). - ruler = " |" + " . |" * (max_line_len // 10) - - # Put line numbers next to the SQL. 
- body = "\n".join( - "{:4}:{}".format(n, line) for n, line in enumerate(lines, start=1) - ) - - return template.format(job_id=job_id, header=header, ruler=ruler, body=body) - - def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None): - """API call: begin the job via a POST request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert - - Args: - client (Optional[google.cloud.bigquery.client.Client]): - The client to use. If not passed, falls back to the ``client`` - associated with the job object or``NoneType``. - retry (Optional[google.api_core.retry.Retry]): - How to retry the RPC. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - - Raises: - ValueError: If the job has already begun. - """ - - try: - super(QueryJob, self)._begin(client=client, retry=retry, timeout=timeout) - except exceptions.GoogleCloudError as exc: - exc.message += self._format_for_exception(self.query, self.job_id) - exc.query_job = self - raise - - def result( - self, page_size=None, max_results=None, retry=DEFAULT_RETRY, timeout=None - ): - """Start the job and wait for it to complete and get the result. - - Args: - page_size (Optional[int]): - The maximum number of rows in each page of results from this - request. Non-positive values are ignored. - max_results (Optional[int]): - The maximum total number of rows from this request. - retry (Optional[google.api_core.retry.Retry]): - How to retry the call that retrieves rows. - timeout (Optional[float]): - The number of seconds to wait for the underlying HTTP transport - before using ``retry``. - If multiple requests are made under the hood, ``timeout`` is - interpreted as the approximate total time of **all** requests. - - Returns: - google.cloud.bigquery.table.RowIterator: - Iterator of row data - :class:`~google.cloud.bigquery.table.Row`-s. During each - page, the iterator will have the ``total_rows`` attribute - set, which counts the total number of rows **in the result - set** (this is distinct from the total number of rows in the - current page: ``iterator.page.num_items``). - - Raises: - google.cloud.exceptions.GoogleCloudError: - If the job failed. - concurrent.futures.TimeoutError: - If the job did not complete in the given timeout. - """ - try: - guard = TimeoutGuard( - timeout, timeout_error_type=concurrent.futures.TimeoutError - ) - with guard: - super(QueryJob, self).result(retry=retry, timeout=timeout) - timeout = guard.remaining_timeout - - # Return an iterator instead of returning the job. - if not self._query_results: - guard = TimeoutGuard( - timeout, timeout_error_type=concurrent.futures.TimeoutError - ) - with guard: - self._query_results = self._client._get_query_results( - self.job_id, - retry, - project=self.project, - location=self.location, - timeout=timeout, - ) - timeout = guard.remaining_timeout - except exceptions.GoogleCloudError as exc: - exc.message += self._format_for_exception(self.query, self.job_id) - exc.query_job = self - raise - except requests.exceptions.Timeout as exc: - six.raise_from(concurrent.futures.TimeoutError, exc) - - # If the query job is complete but there are no query results, this was - # special job, such as a DDL query. Return an empty result set to - # indicate success and avoid calling tabledata.list on a table which - # can't be read (such as a view table). 
- if self._query_results.total_rows is None: - return _EmptyRowIterator() - - schema = self._query_results.schema - dest_table_ref = self.destination - dest_table = Table(dest_table_ref, schema=schema) - dest_table._properties["numRows"] = self._query_results.total_rows - rows = self._client.list_rows( - dest_table, - page_size=page_size, - max_results=max_results, - retry=retry, - timeout=timeout, - ) - rows._preserve_order = _contains_order_by(self.query) - return rows - - # If changing the signature of this method, make sure to apply the same - # changes to table.RowIterator.to_arrow() - def to_arrow( - self, - progress_bar_type=None, - bqstorage_client=None, - create_bqstorage_client=False, - ): - """[Beta] Create a class:`pyarrow.Table` by loading all pages of a - table or query. - - Args: - progress_bar_type (Optional[str]): - If set, use the `tqdm `_ library to - display a progress bar while the data downloads. Install the - ``tqdm`` package to use this feature. - - Possible values of ``progress_bar_type`` include: - - ``None`` - No progress bar. - ``'tqdm'`` - Use the :func:`tqdm.tqdm` function to print a progress bar - to :data:`sys.stderr`. - ``'tqdm_notebook'`` - Use the :func:`tqdm.tqdm_notebook` function to display a - progress bar as a Jupyter notebook widget. - ``'tqdm_gui'`` - Use the :func:`tqdm.tqdm_gui` function to display a - progress bar as a graphical dialog box. - bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): - **Beta Feature** Optional. A BigQuery Storage API client. If - supplied, use the faster BigQuery Storage API to fetch rows - from BigQuery. This API is a billable API. - - This method requires the ``pyarrow`` and - ``google-cloud-bigquery-storage`` libraries. - - Reading from a specific partition or snapshot is not - currently supported by this method. - create_bqstorage_client (bool): - **Beta Feature** Optional. If ``True``, create a BigQuery - Storage API client using the default API settings. The - BigQuery Storage API is a faster way to fetch rows from - BigQuery. See the ``bqstorage_client`` parameter for more - information. - - This argument does nothing if ``bqstorage_client`` is supplied. - - ..versionadded:: 1.24.0 - - Returns: - pyarrow.Table - A :class:`pyarrow.Table` populated with row data and column - headers from the query results. The column headers are derived - from the destination table's schema. - - Raises: - ValueError: - If the :mod:`pyarrow` library cannot be imported. - - ..versionadded:: 1.17.0 - """ - return self.result().to_arrow( - progress_bar_type=progress_bar_type, - bqstorage_client=bqstorage_client, - create_bqstorage_client=create_bqstorage_client, - ) - - # If changing the signature of this method, make sure to apply the same - # changes to table.RowIterator.to_dataframe() - def to_dataframe( - self, - bqstorage_client=None, - dtypes=None, - progress_bar_type=None, - create_bqstorage_client=False, - ): - """Return a pandas DataFrame from a QueryJob - - Args: - bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): - **Alpha Feature** Optional. A BigQuery Storage API client. If - supplied, use the faster BigQuery Storage API to fetch rows - from BigQuery. This API is a billable API. - - This method requires the ``fastavro`` and - ``google-cloud-bigquery-storage`` libraries. - - Reading from a specific partition or snapshot is not - currently supported by this method. 
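# Sketch: to_arrow() above waits on result() internally; pyarrow must be
# installed, and tqdm only if a progress bar is requested.
from google.cloud import bigquery

client = bigquery.Client()
job = client.query("SELECT 17 AS num")
arrow_table = job.to_arrow(progress_bar_type="tqdm")
print(arrow_table.num_rows, arrow_table.schema)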
- - **Caution**: There is a known issue reading small anonymous - query result tables with the BQ Storage API. Write your query - results to a destination table to work around this issue. - dtypes (Map[str, Union[str, pandas.Series.dtype]]): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - progress_bar_type (Optional[str]): - If set, use the `tqdm `_ library to - display a progress bar while the data downloads. Install the - ``tqdm`` package to use this feature. - - See - :func:`~google.cloud.bigquery.table.RowIterator.to_dataframe` - for details. - - ..versionadded:: 1.11.0 - create_bqstorage_client (bool): - **Beta Feature** Optional. If ``True``, create a BigQuery - Storage API client using the default API settings. The - BigQuery Storage API is a faster way to fetch rows from - BigQuery. See the ``bqstorage_client`` parameter for more - information. - - This argument does nothing if ``bqstorage_client`` is supplied. - - ..versionadded:: 1.24.0 - - Returns: - A :class:`~pandas.DataFrame` populated with row data and column - headers from the query results. The column headers are derived - from the destination table's schema. - - Raises: - ValueError: If the `pandas` library cannot be imported. - """ - return self.result().to_dataframe( - bqstorage_client=bqstorage_client, - dtypes=dtypes, - progress_bar_type=progress_bar_type, - create_bqstorage_client=create_bqstorage_client, - ) - - def __iter__(self): - return iter(self.result()) - - -class QueryPlanEntryStep(object): - """Map a single step in a query plan entry. - - Args: - kind (str): step type. - - substeps (List): names of substeps. - """ - - def __init__(self, kind, substeps): - self.kind = kind - self.substeps = list(substeps) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct instance from the JSON repr. - - Args: - resource (Dict): JSON representation of the entry. - - Returns: - QueryPlanEntryStep: new instance built from the resource. - """ - return cls(kind=resource.get("kind"), substeps=resource.get("substeps", ())) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self.kind == other.kind and self.substeps == other.substeps - - -class QueryPlanEntry(object): - """QueryPlanEntry represents a single stage of a query execution plan. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#ExplainQueryStage - for the underlying API representation within query statistics. - """ - - def __init__(self): - self._properties = {} - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct instance from the JSON repr. - - Args: - resource(Dict[str: object]): - ExplainQueryStage representation returned from API. - - Returns: - google.cloud.bigquery.QueryPlanEntry: - Query plan entry parsed from ``resource``. 
- """ - entry = cls() - entry._properties = resource - return entry - - @property - def name(self): - """Optional[str]: Human-readable name of the stage.""" - return self._properties.get("name") - - @property - def entry_id(self): - """Optional[str]: Unique ID for the stage within the plan.""" - return self._properties.get("id") - - @property - def start(self): - """Optional[Datetime]: Datetime when the stage started.""" - if self._properties.get("startMs") is None: - return None - return _helpers._datetime_from_microseconds( - int(self._properties.get("startMs")) * 1000.0 - ) - - @property - def end(self): - """Optional[Datetime]: Datetime when the stage ended.""" - if self._properties.get("endMs") is None: - return None - return _helpers._datetime_from_microseconds( - int(self._properties.get("endMs")) * 1000.0 - ) - - @property - def input_stages(self): - """List(int): Entry IDs for stages that were inputs for this stage.""" - if self._properties.get("inputStages") is None: - return [] - return [ - _helpers._int_or_none(entry) - for entry in self._properties.get("inputStages") - ] - - @property - def parallel_inputs(self): - """Optional[int]: Number of parallel input segments within - the stage. - """ - return _helpers._int_or_none(self._properties.get("parallelInputs")) - - @property - def completed_parallel_inputs(self): - """Optional[int]: Number of parallel input segments completed.""" - return _helpers._int_or_none(self._properties.get("completedParallelInputs")) - - @property - def wait_ms_avg(self): - """Optional[int]: Milliseconds the average worker spent waiting to - be scheduled. - """ - return _helpers._int_or_none(self._properties.get("waitMsAvg")) - - @property - def wait_ms_max(self): - """Optional[int]: Milliseconds the slowest worker spent waiting to - be scheduled. - """ - return _helpers._int_or_none(self._properties.get("waitMsMax")) - - @property - def wait_ratio_avg(self): - """Optional[float]: Ratio of time the average worker spent waiting - to be scheduled, relative to the longest time spent by any worker in - any stage of the overall plan. - """ - return self._properties.get("waitRatioAvg") - - @property - def wait_ratio_max(self): - """Optional[float]: Ratio of time the slowest worker spent waiting - to be scheduled, relative to the longest time spent by any worker in - any stage of the overall plan. - """ - return self._properties.get("waitRatioMax") - - @property - def read_ms_avg(self): - """Optional[int]: Milliseconds the average worker spent reading - input. - """ - return _helpers._int_or_none(self._properties.get("readMsAvg")) - - @property - def read_ms_max(self): - """Optional[int]: Milliseconds the slowest worker spent reading - input. - """ - return _helpers._int_or_none(self._properties.get("readMsMax")) - - @property - def read_ratio_avg(self): - """Optional[float]: Ratio of time the average worker spent reading - input, relative to the longest time spent by any worker in any stage - of the overall plan. - """ - return self._properties.get("readRatioAvg") - - @property - def read_ratio_max(self): - """Optional[float]: Ratio of time the slowest worker spent reading - to be scheduled, relative to the longest time spent by any worker in - any stage of the overall plan. - """ - return self._properties.get("readRatioMax") - - @property - def compute_ms_avg(self): - """Optional[int]: Milliseconds the average worker spent on CPU-bound - processing. 
- """ - return _helpers._int_or_none(self._properties.get("computeMsAvg")) - - @property - def compute_ms_max(self): - """Optional[int]: Milliseconds the slowest worker spent on CPU-bound - processing. - """ - return _helpers._int_or_none(self._properties.get("computeMsMax")) - - @property - def compute_ratio_avg(self): - """Optional[float]: Ratio of time the average worker spent on - CPU-bound processing, relative to the longest time spent by any - worker in any stage of the overall plan. - """ - return self._properties.get("computeRatioAvg") - - @property - def compute_ratio_max(self): - """Optional[float]: Ratio of time the slowest worker spent on - CPU-bound processing, relative to the longest time spent by any - worker in any stage of the overall plan. - """ - return self._properties.get("computeRatioMax") - - @property - def write_ms_avg(self): - """Optional[int]: Milliseconds the average worker spent writing - output data. - """ - return _helpers._int_or_none(self._properties.get("writeMsAvg")) - - @property - def write_ms_max(self): - """Optional[int]: Milliseconds the slowest worker spent writing - output data. - """ - return _helpers._int_or_none(self._properties.get("writeMsMax")) - - @property - def write_ratio_avg(self): - """Optional[float]: Ratio of time the average worker spent writing - output data, relative to the longest time spent by any worker in any - stage of the overall plan. - """ - return self._properties.get("writeRatioAvg") - - @property - def write_ratio_max(self): - """Optional[float]: Ratio of time the slowest worker spent writing - output data, relative to the longest time spent by any worker in any - stage of the overall plan. - """ - return self._properties.get("writeRatioMax") - - @property - def records_read(self): - """Optional[int]: Number of records read by this stage.""" - return _helpers._int_or_none(self._properties.get("recordsRead")) - - @property - def records_written(self): - """Optional[int]: Number of records written by this stage.""" - return _helpers._int_or_none(self._properties.get("recordsWritten")) - - @property - def status(self): - """Optional[str]: status of this stage.""" - return self._properties.get("status") - - @property - def shuffle_output_bytes(self): - """Optional[int]: Number of bytes written by this stage to - intermediate shuffle. - """ - return _helpers._int_or_none(self._properties.get("shuffleOutputBytes")) - - @property - def shuffle_output_bytes_spilled(self): - """Optional[int]: Number of bytes written by this stage to - intermediate shuffle and spilled to disk. - """ - return _helpers._int_or_none(self._properties.get("shuffleOutputBytesSpilled")) - - @property - def steps(self): - """List(QueryPlanEntryStep): List of step operations performed by - each worker in the stage. - """ - return [ - QueryPlanEntryStep.from_api_repr(step) - for step in self._properties.get("steps", []) - ] - - -class TimelineEntry(object): - """TimelineEntry represents progress of a query job at a particular - point in time. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#querytimelinesample - for the underlying API representation within query statistics. - """ - - def __init__(self): - self._properties = {} - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct instance from the JSON repr. - - Args: - resource(Dict[str: object]): - QueryTimelineSample representation returned from API. - - Returns: - google.cloud.bigquery.TimelineEntry: - Timeline sample parsed from ``resource``. 
- """ - entry = cls() - entry._properties = resource - return entry - - @property - def elapsed_ms(self): - """Optional[int]: Milliseconds elapsed since start of query - execution.""" - return _helpers._int_or_none(self._properties.get("elapsedMs")) - - @property - def active_units(self): - """Optional[int]: Current number of input units being processed - by workers, reported as largest value since the last sample.""" - return _helpers._int_or_none(self._properties.get("activeUnits")) - - @property - def pending_units(self): - """Optional[int]: Current number of input units remaining for - query stages active at this sample time.""" - return _helpers._int_or_none(self._properties.get("pendingUnits")) - - @property - def completed_units(self): - """Optional[int]: Current number of input units completed by - this query.""" - return _helpers._int_or_none(self._properties.get("completedUnits")) - - @property - def slot_millis(self): - """Optional[int]: Cumulative slot-milliseconds consumed by - this query.""" - return _helpers._int_or_none(self._properties.get("totalSlotMs")) - - -class UnknownJob(_AsyncJob): - """A job whose type cannot be determined.""" - - @classmethod - def from_api_repr(cls, resource, client): - """Construct an UnknownJob from the JSON representation. - - Args: - resource (Dict): JSON representation of a job. - client (google.cloud.bigquery.client.Client): - Client connected to BigQuery API. - - Returns: - UnknownJob: Job corresponding to the resource. - """ - job_ref_properties = resource.get("jobReference", {"projectId": client.project}) - job_ref = _JobReference._from_api_repr(job_ref_properties) - job = cls(job_ref, client) - # Populate the job reference with the project, even if it has been - # redacted, because we know it should equal that of the request. - resource["jobReference"] = job_ref_properties - job._properties = resource - return job - - -class ScriptStackFrame(object): - """Stack frame showing the line/column/procedure name where the current - evaluation happened. - - Args: - resource (Map[str, Any]): JSON representation of object. - """ - - def __init__(self, resource): - self._properties = resource - - @property - def procedure_id(self): - """Optional[str]: Name of the active procedure. - - Omitted if in a top-level script. - """ - return self._properties.get("procedureId") - - @property - def text(self): - """str: Text of the current statement/expression.""" - return self._properties.get("text") - - @property - def start_line(self): - """int: One-based start line.""" - return _helpers._int_or_none(self._properties.get("startLine")) - - @property - def start_column(self): - """int: One-based start column.""" - return _helpers._int_or_none(self._properties.get("startColumn")) - - @property - def end_line(self): - """int: One-based end line.""" - return _helpers._int_or_none(self._properties.get("endLine")) - - @property - def end_column(self): - """int: One-based end column.""" - return _helpers._int_or_none(self._properties.get("endColumn")) - - -class ScriptStatistics(object): - """Statistics for a child job of a script. - - Args: - resource (Map[str, Any]): JSON representation of object. - """ - - def __init__(self, resource): - self._properties = resource - - @property - def stack_frames(self): - """List[ScriptStackFrame]: Stack trace where the current evaluation - happened. - - Shows line/column/procedure name of each frame on the stack at the - point where the current evaluation happened. - - The leaf frame is first, the primary script is last. 
- """ - return [ - ScriptStackFrame(frame) for frame in self._properties.get("stackFrames", []) - ] - - @property - def evaluation_kind(self): - """str: Indicates the type of child job. - - Possible values include ``STATEMENT`` and ``EXPRESSION``. - """ - return self._properties.get("evaluationKind") diff --git a/bigquery/google/cloud/bigquery/magics.py b/bigquery/google/cloud/bigquery/magics.py deleted file mode 100644 index 39608b19fcde..000000000000 --- a/bigquery/google/cloud/bigquery/magics.py +++ /dev/null @@ -1,628 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""IPython Magics - -.. function:: %%bigquery - - IPython cell magic to run a query and display the result as a DataFrame - - .. code-block:: python - - %%bigquery [] [--project ] [--use_legacy_sql] - [--verbose] [--params ] - - - Parameters: - - * ```` (optional, line argument): - variable to store the query results. The results are not displayed if - this parameter is used. If an error occurs during the query execution, - the corresponding ``QueryJob`` instance (if available) is stored in - the variable instead. - * ``--destination_table`` (optional, line argument): - A dataset and table to store the query results. If table does not exists, - it will be created. If table already exists, its data will be overwritten. - Variable should be in a format .. - * ``--project `` (optional, line argument): - Project to use for running the query. Defaults to the context - :attr:`~google.cloud.bigquery.magics.Context.project`. - * ``--use_bqstorage_api`` (optional, line argument): - Downloads the DataFrame using the BigQuery Storage API. To use this - option, install the ``google-cloud-bigquery-storage`` and ``fastavro`` - packages, and `enable the BigQuery Storage API - `_. - * ``--use_legacy_sql`` (optional, line argument): - Runs the query using Legacy SQL syntax. Defaults to Standard SQL if - this argument not used. - * ``--verbose`` (optional, line argument): - If this flag is used, information including the query job ID and the - amount of time for the query to complete will not be cleared after the - query is finished. By default, this information will be displayed but - will be cleared after the query is finished. - * ``--params `` (optional, line argument): - If present, the argument following the ``--params`` flag must be - either: - - * :class:`str` - A JSON string representation of a dictionary in the - format ``{"param_name": "param_value"}`` (ex. ``{"num": 17}``). Use - of the parameter in the query should be indicated with - ``@param_name``. See ``In[5]`` in the Examples section below. - - * :class:`dict` reference - A reference to a ``dict`` in the format - ``{"param_name": "param_value"}``, where the value types must be JSON - serializable. The variable reference is indicated by a ``$`` before - the variable name (ex. ``$my_dict_var``). See ``In[6]`` and ``In[7]`` - in the Examples section below. - * ```` (required, cell argument): - SQL query to run. 
- - Returns: - A :class:`pandas.DataFrame` with the query results. - - .. note:: - All queries run using this magic will run using the context - :attr:`~google.cloud.bigquery.magics.Context.credentials`. - - Examples: - The following examples can be run in an IPython notebook after loading - the bigquery IPython extension (see ``In[1]``) and setting up - Application Default Credentials. - - .. code-block:: none - - In [1]: %load_ext google.cloud.bigquery - - In [2]: %%bigquery - ...: SELECT name, SUM(number) as count - ...: FROM `bigquery-public-data.usa_names.usa_1910_current` - ...: GROUP BY name - ...: ORDER BY count DESC - ...: LIMIT 3 - - Out[2]: name count - ...: ------------------- - ...: 0 James 4987296 - ...: 1 John 4866302 - ...: 2 Robert 4738204 - - In [3]: %%bigquery df --project my-alternate-project --verbose - ...: SELECT name, SUM(number) as count - ...: FROM `bigquery-public-data.usa_names.usa_1910_current` - ...: WHERE gender = 'F' - ...: GROUP BY name - ...: ORDER BY count DESC - ...: LIMIT 3 - Executing query with job ID: bf633912-af2c-4780-b568-5d868058632b - Query executing: 2.61s - Query complete after 2.92s - - In [4]: df - - Out[4]: name count - ...: ---------------------- - ...: 0 Mary 3736239 - ...: 1 Patricia 1568495 - ...: 2 Elizabeth 1519946 - - In [5]: %%bigquery --params {"num": 17} - ...: SELECT @num AS num - - Out[5]: num - ...: ------- - ...: 0 17 - - In [6]: params = {"num": 17} - - In [7]: %%bigquery --params $params - ...: SELECT @num AS num - - Out[7]: num - ...: ------- - ...: 0 17 -""" - -from __future__ import print_function - -import re -import ast -import functools -import sys -import time -from concurrent import futures - -try: - import IPython - from IPython import display - from IPython.core import magic_arguments -except ImportError: # pragma: NO COVER - raise ImportError("This module can only be loaded in IPython.") - -from google.api_core import client_info -from google.api_core.exceptions import NotFound -import google.auth -from google.cloud import bigquery -import google.cloud.bigquery.dataset -from google.cloud.bigquery.dbapi import _helpers -import six - - -IPYTHON_USER_AGENT = "ipython-{}".format(IPython.__version__) - - -class Context(object): - """Storage for objects to be used throughout an IPython notebook session. - - A Context object is initialized when the ``magics`` module is imported, - and can be found at ``google.cloud.bigquery.magics.context``. - """ - - def __init__(self): - self._credentials = None - self._project = None - self._connection = None - self._use_bqstorage_api = None - self._default_query_job_config = bigquery.QueryJobConfig() - - @property - def credentials(self): - """google.auth.credentials.Credentials: Credentials to use for queries - performed through IPython magics - - Note: - These credentials do not need to be explicitly defined if you are - using Application Default Credentials. If you are not using - Application Default Credentials, manually construct a - :class:`google.auth.credentials.Credentials` object and set it as - the context credentials as demonstrated in the example below. See - `auth docs`_ for more information on obtaining credentials. - - Example: - Manually setting the context credentials: - - >>> from google.cloud.bigquery import magics - >>> from google.oauth2 import service_account - >>> credentials = (service_account - ... .Credentials.from_service_account_file( - ... '/path/to/key.json')) - >>> magics.context.credentials = credentials - - - .. 
_auth docs: http://google-auth.readthedocs.io
-            /en/latest/user-guide.html#obtaining-credentials
-        """
-        if self._credentials is None:
-            self._credentials, _ = google.auth.default()
-        return self._credentials
-
-    @credentials.setter
-    def credentials(self, value):
-        self._credentials = value
-
-    @property
-    def project(self):
-        """str: Default project to use for queries performed through IPython
-        magics
-
-        Note:
-            The project does not need to be explicitly defined if you have an
-            environment default project set. If you do not have a default
-            project set in your environment, manually assign the project as
-            demonstrated in the example below.
-
-        Example:
-            Manually setting the context project:
-
-            >>> from google.cloud.bigquery import magics
-            >>> magics.context.project = 'my-project'
-        """
-        if self._project is None:
-            _, self._project = google.auth.default()
-        return self._project
-
-    @project.setter
-    def project(self, value):
-        self._project = value
-
-    @property
-    def use_bqstorage_api(self):
-        """bool: [Beta] Set to True to use the BigQuery Storage API to
-        download query results
-
-        To use this option, install the ``google-cloud-bigquery-storage`` and
-        ``fastavro`` packages, and `enable the BigQuery Storage API
-        `_.
-        """
-        return self._use_bqstorage_api
-
-    @use_bqstorage_api.setter
-    def use_bqstorage_api(self, value):
-        self._use_bqstorage_api = value
-
-    @property
-    def default_query_job_config(self):
-        """google.cloud.bigquery.job.QueryJobConfig: Default job
-        configuration for queries.
-
-        The context's :class:`~google.cloud.bigquery.job.QueryJobConfig` is
-        used for queries. Some properties can be overridden with arguments to
-        the magics.
-
-        Example:
-            Manually setting the default value for ``maximum_bytes_billed``
-            to 100 MB:
-
-            >>> from google.cloud.bigquery import magics
-            >>> magics.context.default_query_job_config.maximum_bytes_billed = 100000000
-        """
-        return self._default_query_job_config
-
-    @default_query_job_config.setter
-    def default_query_job_config(self, value):
-        self._default_query_job_config = value
-
-
-context = Context()
-
-
-def _handle_error(error, destination_var=None):
-    """Process a query execution error.
-
-    Args:
-        error (Exception):
-            An exception that occurred during the query execution.
-        destination_var (Optional[str]):
-            The name of the IPython session variable to store the query job.
-    """
-    if destination_var:
-        query_job = getattr(error, "query_job", None)
-
-        if query_job is not None:
-            IPython.get_ipython().push({destination_var: query_job})
-        else:
-            # this is the case when previewing table rows by providing just
-            # table ID to cell magic
-            print(
-                "Could not save output to variable '{}'.".format(destination_var),
-                file=sys.stderr,
-            )
-
-    print("\nERROR:\n", str(error), file=sys.stderr)
-
-
-def _run_query(client, query, job_config=None):
-    """Runs a query while printing status updates
-
-    Args:
-        client (google.cloud.bigquery.client.Client):
-            Client to bundle configuration needed for API requests.
-        query (str):
-            SQL query to be executed. Defaults to the standard SQL dialect.
-            Use the ``job_config`` parameter to change dialects.
-        job_config (google.cloud.bigquery.job.QueryJobConfig, optional):
-            Extra configuration options for the job.
- - Returns: - google.cloud.bigquery.job.QueryJob: the query job created - - Example: - >>> client = bigquery.Client() - >>> _run_query(client, "SELECT 17") - Executing query with job ID: bf633912-af2c-4780-b568-5d868058632b - Query executing: 1.66s - Query complete after 2.07s - 'bf633912-af2c-4780-b568-5d868058632b' - """ - start_time = time.time() - query_job = client.query(query, job_config=job_config) - - if job_config and job_config.dry_run: - return query_job - - print("Executing query with job ID: {}".format(query_job.job_id)) - - while True: - print("\rQuery executing: {:0.2f}s".format(time.time() - start_time), end="") - try: - query_job.result(timeout=0.5) - break - except futures.TimeoutError: - continue - print("\nQuery complete after {:0.2f}s".format(time.time() - start_time)) - return query_job - - -def _create_dataset_if_necessary(client, dataset_id): - """Create a dataset in the current project if it doesn't exist. - - Args: - client (google.cloud.bigquery.client.Client): - Client to bundle configuration needed for API requests. - dataset_id (str): - Dataset id. - """ - dataset_reference = bigquery.dataset.DatasetReference(client.project, dataset_id) - try: - dataset = client.get_dataset(dataset_reference) - return - except NotFound: - pass - dataset = bigquery.Dataset(dataset_reference) - dataset.location = client.location - print("Creating dataset: {}".format(dataset_id)) - dataset = client.create_dataset(dataset) - - -@magic_arguments.magic_arguments() -@magic_arguments.argument( - "destination_var", - nargs="?", - help=("If provided, save the output to this variable instead of displaying it."), -) -@magic_arguments.argument( - "--destination_table", - type=str, - default=None, - help=( - "If provided, save the output of the query to a new BigQuery table. " - "Variable should be in a format .. " - "If table does not exists, it will be created. " - "If table already exists, its data will be overwritten." - ), -) -@magic_arguments.argument( - "--project", - type=str, - default=None, - help=("Project to use for executing this query. Defaults to the context project."), -) -@magic_arguments.argument( - "--max_results", - default=None, - help=( - "Maximum number of rows in dataframe returned from executing the query." - "Defaults to returning all rows." - ), -) -@magic_arguments.argument( - "--maximum_bytes_billed", - default=None, - help=( - "maximum_bytes_billed to use for executing this query. Defaults to " - "the context default_query_job_config.maximum_bytes_billed." - ), -) -@magic_arguments.argument( - "--dry_run", - action="store_true", - default=False, - help=( - "Sets query to be a dry run to estimate costs. " - "Defaults to executing the query instead of dry run if this argument is not used." - ), -) -@magic_arguments.argument( - "--use_legacy_sql", - action="store_true", - default=False, - help=( - "Sets query to use Legacy SQL instead of Standard SQL. Defaults to " - "Standard SQL if this argument is not used." - ), -) -@magic_arguments.argument( - "--use_bqstorage_api", - action="store_true", - default=False, - help=( - "[Beta] Use the BigQuery Storage API to download large query results. " - "To use this option, install the google-cloud-bigquery-storage and " - "fastavro packages, and enable the BigQuery Storage API." - ), -) -@magic_arguments.argument( - "--verbose", - action="store_true", - default=False, - help=( - "If set, print verbose output, including the query job ID and the " - "amount of time for the query to finish. 
By default, this " - "information will be displayed as the query runs, but will be " - "cleared after the query is finished." - ), -) -@magic_arguments.argument( - "--params", - nargs="+", - default=None, - help=( - "Parameters to format the query string. If present, the --params " - "flag should be followed by a string representation of a dictionary " - "in the format {'param_name': 'param_value'} (ex. {\"num\": 17}), " - "or a reference to a dictionary in the same format. The dictionary " - "reference can be made by including a '$' before the variable " - "name (ex. $my_dict_var)." - ), -) -def _cell_magic(line, query): - """Underlying function for bigquery cell magic - - Note: - This function contains the underlying logic for the 'bigquery' cell - magic. This function is not meant to be called directly. - - Args: - line (str): "%%bigquery" followed by arguments as required - query (str): SQL query to run - - Returns: - pandas.DataFrame: the query results. - """ - args = magic_arguments.parse_argstring(_cell_magic, line) - - params = [] - if args.params is not None: - try: - params = _helpers.to_query_parameters( - ast.literal_eval("".join(args.params)) - ) - except Exception: - raise SyntaxError( - "--params is not a correctly formatted JSON string or a JSON " - "serializable dictionary" - ) - - project = args.project or context.project - client = bigquery.Client( - project=project, - credentials=context.credentials, - default_query_job_config=context.default_query_job_config, - client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT), - ) - if context._connection: - client._connection = context._connection - bqstorage_client = _make_bqstorage_client( - args.use_bqstorage_api or context.use_bqstorage_api, context.credentials - ) - - close_transports = functools.partial(_close_transports, client, bqstorage_client) - - try: - if args.max_results: - max_results = int(args.max_results) - else: - max_results = None - - query = query.strip() - - # Any query that does not contain whitespace (aside from leading and trailing whitespace) - # is assumed to be a table id - if not re.search(r"\s", query): - try: - rows = client.list_rows(query, max_results=max_results) - except Exception as ex: - _handle_error(ex, args.destination_var) - return - - result = rows.to_dataframe(bqstorage_client=bqstorage_client) - if args.destination_var: - IPython.get_ipython().push({args.destination_var: result}) - return - else: - return result - - job_config = bigquery.job.QueryJobConfig() - job_config.query_parameters = params - job_config.use_legacy_sql = args.use_legacy_sql - job_config.dry_run = args.dry_run - - if args.destination_table: - split = args.destination_table.split(".") - if len(split) != 2: - raise ValueError( - "--destination_table should be in a <dataset_id>.<table_id> format."
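For reference, a hedged sketch of how these arguments combine in a notebook cell (``df`` and the query are placeholders; assumes the extension was loaded with ``%load_ext google.cloud.bigquery``):

    %%bigquery df --project my-project --params {"num": 17}
    SELECT @num AS num

After the cell runs, the resulting ``pandas.DataFrame`` is pushed into the session as ``df`` instead of being displayed.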
- ) - dataset_id, table_id = split - job_config.allow_large_results = True - dataset_ref = bigquery.dataset.DatasetReference(client.project, dataset_id) - destination_table_ref = dataset_ref.table(table_id) - job_config.destination = destination_table_ref - job_config.create_disposition = "CREATE_IF_NEEDED" - job_config.write_disposition = "WRITE_TRUNCATE" - _create_dataset_if_necessary(client, dataset_id) - - if args.maximum_bytes_billed == "None": - job_config.maximum_bytes_billed = 0 - elif args.maximum_bytes_billed is not None: - value = int(args.maximum_bytes_billed) - job_config.maximum_bytes_billed = value - - try: - query_job = _run_query(client, query, job_config=job_config) - except Exception as ex: - _handle_error(ex, args.destination_var) - return - - if not args.verbose: - display.clear_output() - - if args.dry_run and args.destination_var: - IPython.get_ipython().push({args.destination_var: query_job}) - return - elif args.dry_run: - print( - "Query validated. This query will process {} bytes.".format( - query_job.total_bytes_processed - ) - ) - return query_job - - if max_results: - result = query_job.result(max_results=max_results).to_dataframe( - bqstorage_client=bqstorage_client - ) - else: - result = query_job.to_dataframe(bqstorage_client=bqstorage_client) - - if args.destination_var: - IPython.get_ipython().push({args.destination_var: result}) - else: - return result - finally: - close_transports() - - -def _make_bqstorage_client(use_bqstorage_api, credentials): - if not use_bqstorage_api: - return None - - try: - from google.cloud import bigquery_storage_v1beta1 - except ImportError as err: - customized_error = ImportError( - "Install the google-cloud-bigquery-storage and pyarrow packages " - "to use the BigQuery Storage API." - ) - six.raise_from(customized_error, err) - - try: - from google.api_core.gapic_v1 import client_info as gapic_client_info - except ImportError as err: - customized_error = ImportError( - "Install the grpcio package to use the BigQuery Storage API." - ) - six.raise_from(customized_error, err) - - return bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=credentials, - client_info=gapic_client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT), - ) - - -def _close_transports(client, bqstorage_client): - """Close the given clients' underlying transport channels. - - Closing the transport is needed to release system resources, namely open - sockets. - - Args: - client (:class:`~google.cloud.bigquery.client.Client`): - bqstorage_client - (Optional[:class:`~google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient`]): - A client for the BigQuery Storage API. - - """ - client.close() - if bqstorage_client is not None: - bqstorage_client.transport.channel.close() diff --git a/bigquery/google/cloud/bigquery/model.py b/bigquery/google/cloud/bigquery/model.py deleted file mode 100644 index d39ec5f2f60c..000000000000 --- a/bigquery/google/cloud/bigquery/model.py +++ /dev/null @@ -1,435 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
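The two helpers above pair client construction with explicit transport cleanup. A condensed sketch of the lifecycle ``_cell_magic`` follows, using only names defined in this module ("my-project" is a placeholder):

    client = bigquery.Client(project="my-project")
    bqstorage_client = _make_bqstorage_client(True, context.credentials)
    try:
        query_job = _run_query(client, "SELECT 17")
        df = query_job.to_dataframe(bqstorage_client=bqstorage_client)
    finally:
        # Release the open sockets held by both transports.
        _close_transports(client, bqstorage_client)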
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define resources for the BigQuery ML Models API.""" - -import copy - -from google.protobuf import json_format -import six - -import google.cloud._helpers -from google.api_core import datetime_helpers -from google.cloud.bigquery import _helpers -from google.cloud.bigquery_v2 import types -from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration - - -class Model(object): - """Model represents a machine learning model resource. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models - - Args: - model_ref (Union[google.cloud.bigquery.model.ModelReference, str]): - A pointer to a model. If ``model_ref`` is a string, it must - include a project ID, dataset ID, and model ID, each separated - by ``.``. - """ - - _PROPERTY_TO_API_FIELD = { - "expires": "expirationTime", - "friendly_name": "friendlyName", - # Even though it's not necessary for field mapping to map when the - # property name equals the resource name, we add these here so that we - # have an exhaustive list of all mutable properties. - "labels": "labels", - "description": "description", - "encryption_configuration": "encryptionConfiguration", - } - - def __init__(self, model_ref): - # Use _proto on read-only properties to use its built-in type - # conversion. - self._proto = types.Model() - - # Use _properties on read-write properties to match the REST API - # semantics. The BigQuery API makes a distinction between an unset - # value, a null value, and a default value (0 or ""), but the protocol - # buffer classes do not. - self._properties = {} - - if isinstance(model_ref, six.string_types): - model_ref = ModelReference.from_string(model_ref) - - if model_ref: - self._proto.model_reference.CopyFrom(model_ref._proto) - - @property - def reference(self): - """A :class:`~google.cloud.bigquery.model.ModelReference` pointing to - this model. - - Read-only. - - Returns: - google.cloud.bigquery.model.ModelReference: pointer to this model. - """ - ref = ModelReference() - ref._proto = self._proto.model_reference - return ref - - @property - def project(self): - """str: Project bound to the model""" - return self.reference.project - - @property - def dataset_id(self): - """str: ID of dataset containing the model.""" - return self.reference.dataset_id - - @property - def model_id(self): - """str: The model ID.""" - return self.reference.model_id - - @property - def path(self): - """str: URL path for the model's APIs.""" - return self.reference.path - - @property - def location(self): - """str: The geographic location where the model resides. This value - is inherited from the dataset. - - Read-only. - """ - return self._proto.location - - @property - def etag(self): - """str: ETag for the model resource (:data:`None` until - set from the server). - - Read-only. - """ - return self._proto.etag - - @property - def created(self): - """Union[datetime.datetime, None]: Datetime at which the model was - created (:data:`None` until set from the server). - - Read-only. - """ - value = self._proto.creation_time - if value is not None and value != 0: - # value will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(value) - ) - - @property - def modified(self): - """Union[datetime.datetime, None]: Datetime at which the model was last - modified (:data:`None` until set from the server). - - Read-only.
- """ - value = self._proto.last_modified_time - if value is not None and value != 0: - # value will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(value) - ) - - @property - def model_type(self): - """google.cloud.bigquery_v2.gapic.enums.Model.ModelType: Type of the - model resource. - - Read-only. - - The value is one of elements of the - :class:`~google.cloud.bigquery_v2.gapic.enums.Model.ModelType` - enumeration. - """ - return self._proto.model_type - - @property - def training_runs(self): - """Sequence[google.cloud.bigquery_v2.types.Model.TrainingRun]: Information - for all training runs in increasing order of start time. - - Read-only. - - An iterable of :class:`~google.cloud.bigquery_v2.types.Model.TrainingRun`. - """ - return self._proto.training_runs - - @property - def feature_columns(self): - """Sequence[google.cloud.bigquery_v2.types.StandardSqlField]: Input - feature columns that were used to train this model. - - Read-only. - - An iterable of :class:`~google.cloud.bigquery_v2.types.StandardSqlField`. - """ - return self._proto.feature_columns - - @property - def label_columns(self): - """Sequence[google.cloud.bigquery_v2.types.StandardSqlField]: Label - columns that were used to train this model. The output of the model - will have a ``predicted_`` prefix to these columns. - - Read-only. - - An iterable of :class:`~google.cloud.bigquery_v2.types.StandardSqlField`. - """ - return self._proto.label_columns - - @property - def expires(self): - """Union[datetime.datetime, None]: The datetime when this model - expires. If not present, the model will persist indefinitely. Expired - models will be deleted and their storage reclaimed. - """ - value = self._properties.get("expirationTime") - if value is not None: - # value will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(value) - ) - - @expires.setter - def expires(self, value): - if value is not None: - value = str(google.cloud._helpers._millis_from_datetime(value)) - self._properties["expirationTime"] = value - - @property - def description(self): - """Optional[str]: Description of the model (defaults to - :data:`None`). - """ - return self._properties.get("description") - - @description.setter - def description(self, value): - self._properties["description"] = value - - @property - def friendly_name(self): - """Union[str, None]: Title of the table (defaults to :data:`None`). - - Raises: - ValueError: For invalid value types. - """ - return self._properties.get("friendlyName") - - @friendly_name.setter - def friendly_name(self, value): - self._properties["friendlyName"] = value - - @property - def labels(self): - """Dict[str, str]: Labels for the table. - - This method always returns a dict. To change a model's labels, - modify the dict, then call ``Client.update_model``. To delete a - label, set its value to :data:`None` before updating. - """ - return self._properties.setdefault("labels", {}) - - @labels.setter - def labels(self, value): - if value is None: - value = {} - self._properties["labels"] = value - - @property - def encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the model. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. - - See `protecting data with Cloud KMS keys - `_ - in the BigQuery documentation. 
- """ - prop = self._properties.get("encryptionConfiguration") - if prop: - prop = EncryptionConfiguration.from_api_repr(prop) - return prop - - @encryption_configuration.setter - def encryption_configuration(self, value): - api_repr = value - if value: - api_repr = value.to_api_repr() - self._properties["encryptionConfiguration"] = api_repr - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a model resource given its API representation - - Args: - resource (Dict[str, object]): - Model resource representation from the API - - Returns: - google.cloud.bigquery.model.Model: Model parsed from ``resource``. - """ - this = cls(None) - # Keep a reference to the resource as a workaround to find unknown - # field values. - this._properties = resource - - # Convert from millis-from-epoch to timestamp well-known type. - # TODO: Remove this hack once CL 238585470 hits prod. - resource = copy.deepcopy(resource) - for training_run in resource.get("trainingRuns", ()): - start_time = training_run.get("startTime") - if not start_time or "-" in start_time: # Already right format? - continue - start_time = datetime_helpers.from_microseconds(1e3 * float(start_time)) - training_run["startTime"] = datetime_helpers.to_rfc3339(start_time) - - this._proto = json_format.ParseDict( - resource, types.Model(), ignore_unknown_fields=True - ) - return this - - def _build_resource(self, filter_fields): - """Generate a resource for ``update``.""" - return _helpers._build_resource_from_properties(self, filter_fields) - - def __repr__(self): - return "Model(reference={})".format(repr(self.reference)) - - -class ModelReference(object): - """ModelReferences are pointers to models. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models#modelreference - """ - - def __init__(self): - self._proto = types.ModelReference() - self._properties = {} - - @property - def project(self): - """str: Project bound to the model""" - return self._proto.project_id - - @property - def dataset_id(self): - """str: ID of dataset containing the model.""" - return self._proto.dataset_id - - @property - def model_id(self): - """str: The model ID.""" - return self._proto.model_id - - @property - def path(self): - """str: URL path for the model's APIs.""" - return "/projects/%s/datasets/%s/models/%s" % ( - self._proto.project_id, - self._proto.dataset_id, - self._proto.model_id, - ) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a model reference given its API representation - - Args: - resource (Dict[str, object]): - Model reference representation returned from the API - - Returns: - google.cloud.bigquery.model.ModelReference: - Model reference parsed from ``resource``. - """ - ref = cls() - # Keep a reference to the resource as a workaround to find unknown - # field values. - ref._properties = resource - ref._proto = json_format.ParseDict( - resource, types.ModelReference(), ignore_unknown_fields=True - ) - return ref - - @classmethod - def from_string(cls, model_id, default_project=None): - """Construct a model reference from model ID string. - - Args: - model_id (str): - A model ID in standard SQL format. If ``default_project`` - is not specified, this must included a project ID, dataset - ID, and model ID, each separated by ``.``. - default_project (str): - Optional. The project ID to use when ``model_id`` does not - include a project ID. - - Returns: - google.cloud.bigquery.model.ModelReference: - Model reference parsed from ``model_id``. 
- - Raises: - ValueError: - If ``model_id`` is not a fully-qualified model ID in - standard SQL format. - """ - proj, dset, model = _helpers._parse_3_part_id( - model_id, default_project=default_project, property_name="model_id" - ) - return cls.from_api_repr( - {"projectId": proj, "datasetId": dset, "modelId": model} - ) - - def to_api_repr(self): - """Construct the API resource representation of this model reference. - - Returns: - Dict[str, object]: Model reference represented as an API resource - """ - return json_format.MessageToDict(self._proto) - - def _key(self): - """Unique key for this model. - - This is used for hashing a ModelReference. - """ - return self.project, self.dataset_id, self.model_id - - def __eq__(self, other): - if not isinstance(other, ModelReference): - return NotImplemented - return self._proto == other._proto - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self._key()) - - def __repr__(self): - return "ModelReference(project='{}', dataset_id='{}', model_id='{}')".format( - self.project, self.dataset_id, self.model_id - ) diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py deleted file mode 100644 index 925f3e29d298..000000000000 --- a/bigquery/google/cloud/bigquery/query.py +++ /dev/null @@ -1,633 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""BigQuery query processing.""" - -from collections import OrderedDict -import copy - -from google.cloud.bigquery.table import _parse_schema_resource -from google.cloud.bigquery._helpers import _rows_from_json -from google.cloud.bigquery._helpers import _QUERY_PARAMS_FROM_JSON -from google.cloud.bigquery._helpers import _SCALAR_VALUE_TO_JSON_PARAM - - -class UDFResource(object): - """Describe a single user-defined function (UDF) resource. - - Args: - udf_type (str): the type of the resource ('inlineCode' or 'resourceUri') - - value (str): the inline code or resource URI. - - See - https://cloud.google.com/bigquery/user-defined-functions#api - """ - - def __init__(self, udf_type, value): - self.udf_type = udf_type - self.value = value - - def __eq__(self, other): - if not isinstance(other, UDFResource): - return NotImplemented - return self.udf_type == other.udf_type and self.value == other.value - - def __ne__(self, other): - return not self == other - - -class _AbstractQueryParameter(object): - """Base class for named / positional query parameters. - """ - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct parameter from JSON resource. - - Args: - resource (Dict): JSON mapping of parameter - - Returns: - google.cloud.bigquery.query.ScalarQueryParameter - """ - raise NotImplementedError - - def to_api_repr(self): - """Construct JSON API representation for the parameter. - - Returns: - Dict: JSON representation for the parameter.
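To illustrate ``ModelReference.from_string`` and ``path`` above, a small sketch (all IDs are placeholders):

    ref = ModelReference.from_string("my-project.my_dataset.my_model")
    ref.model_id  # 'my_model'
    ref.path      # '/projects/my-project/datasets/my_dataset/models/my_model'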
- """ - raise NotImplementedError - - -class ScalarQueryParameter(_AbstractQueryParameter): - """Named / positional query parameters for scalar values. - - Args: - name (Optional[str]): - Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). - - type_ (str): - name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. - - value (Union[str, int, float, decimal.Decimal, bool, - datetime.datetime, datetime.date]): the scalar parameter value. - """ - - def __init__(self, name, type_, value): - self.name = name - self.type_ = type_ - self.value = value - - @classmethod - def positional(cls, type_, value): - """Factory for positional paramater. - - Args: - type_ (str): - name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. - - value (Union[str, int, float, decimal.Decimal, bool, - datetime.datetime, - datetime.date]): the scalar parameter value. - - Returns: - google.cloud.bigquery.query.ScalarQueryParameter: instance without name - """ - return cls(None, type_, value) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct parameter from JSON resource. - - Args: - resource (Dict): JSON mapping of parameter - - Returns: - google.cloud.bigquery.query.ScalarQueryParameter: instance - """ - name = resource.get("name") - type_ = resource["parameterType"]["type"] - - # parameterValue might not be present if JSON resource originates - # from the back-end - the latter omits it for None values. - value = resource.get("parameterValue", {}).get("value") - if value is not None: - converted = _QUERY_PARAMS_FROM_JSON[type_](value, None) - else: - converted = None - - return cls(name, type_, converted) - - def to_api_repr(self): - """Construct JSON API representation for the parameter. - - Returns: - Dict: JSON mapping - """ - value = self.value - converter = _SCALAR_VALUE_TO_JSON_PARAM.get(self.type_) - if converter is not None: - value = converter(value) - resource = { - "parameterType": {"type": self.type_}, - "parameterValue": {"value": value}, - } - if self.name is not None: - resource["name"] = self.name - return resource - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple: The contents of this :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. - """ - return (self.name, self.type_.upper(), self.value) - - def __eq__(self, other): - if not isinstance(other, ScalarQueryParameter): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __repr__(self): - return "ScalarQueryParameter{}".format(self._key()) - - -class ArrayQueryParameter(_AbstractQueryParameter): - """Named / positional query parameters for array values. - - Args: - name (Optional[str]): - Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). - - array_type (str): - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - - values (List[appropriate scalar type]): the parameter array values. - """ - - def __init__(self, name, array_type, values): - self.name = name - self.array_type = array_type - self.values = values - - @classmethod - def positional(cls, array_type, values): - """Factory for positional parameters. 
- - Args: - array_type (str): - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - - values (List[appropriate scalar type]): the parameter array values. - - Returns: - google.cloud.bigquery.query.ArrayQueryParameter: instance without name - """ - return cls(None, array_type, values) - - @classmethod - def _from_api_repr_struct(cls, resource): - name = resource.get("name") - converted = [] - # We need to flatten the array to use the StructQueryParameter - # parse code. - resource_template = { - # The arrayType includes all the types of the fields of the STRUCT - "parameterType": resource["parameterType"]["arrayType"] - } - for array_value in resource["parameterValue"]["arrayValues"]: - struct_resource = copy.deepcopy(resource_template) - struct_resource["parameterValue"] = array_value - struct_value = StructQueryParameter.from_api_repr(struct_resource) - converted.append(struct_value) - return cls(name, "STRUCT", converted) - - @classmethod - def _from_api_repr_scalar(cls, resource): - name = resource.get("name") - array_type = resource["parameterType"]["arrayType"]["type"] - parameter_value = resource.get("parameterValue", {}) - array_values = parameter_value.get("arrayValues", ()) - values = [value["value"] for value in array_values] - converted = [ - _QUERY_PARAMS_FROM_JSON[array_type](value, None) for value in values - ] - return cls(name, array_type, converted) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct parameter from JSON resource. - - Args: - resource (Dict): JSON mapping of parameter - - Returns: - google.cloud.bigquery.query.ArrayQueryParameter: instance - """ - array_type = resource["parameterType"]["arrayType"]["type"] - if array_type == "STRUCT": - return cls._from_api_repr_struct(resource) - return cls._from_api_repr_scalar(resource) - - def to_api_repr(self): - """Construct JSON API representation for the parameter. - - Returns: - Dict: JSON mapping - """ - values = self.values - if self.array_type == "RECORD" or self.array_type == "STRUCT": - reprs = [value.to_api_repr() for value in values] - a_type = reprs[0]["parameterType"] - a_values = [repr_["parameterValue"] for repr_ in reprs] - else: - a_type = {"type": self.array_type} - converter = _SCALAR_VALUE_TO_JSON_PARAM.get(self.array_type) - if converter is not None: - values = [converter(value) for value in values] - a_values = [{"value": value} for value in values] - resource = { - "parameterType": {"type": "ARRAY", "arrayType": a_type}, - "parameterValue": {"arrayValues": a_values}, - } - if self.name is not None: - resource["name"] = self.name - return resource - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple: The contents of this :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. - """ - return (self.name, self.array_type.upper(), self.values) - - def __eq__(self, other): - if not isinstance(other, ArrayQueryParameter): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __repr__(self): - return "ArrayQueryParameter{}".format(self._key()) - - -class StructQueryParameter(_AbstractQueryParameter): - """Named / positional query parameters for struct values. - - Args: - name (Optional[str]): - Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). 
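Mirroring the scalar case, an array parameter built with the ``positional`` factory above serializes roughly as follows (values are placeholders; integers are expected to be rendered as strings on the wire by the scalar converters):

    param = ArrayQueryParameter.positional("INT64", [1, 2])
    param.to_api_repr()
    # {'parameterType': {'type': 'ARRAY', 'arrayType': {'type': 'INT64'}},
    #  'parameterValue': {'arrayValues': [{'value': '1'}, {'value': '2'}]}}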
- - sub_params (Union[Tuple[ - google.cloud.bigquery.query.ScalarQueryParameter, - google.cloud.bigquery.query.ArrayQueryParameter, - google.cloud.bigquery.query.StructQueryParameter - ]]): the sub-parameters for the struct - """ - - def __init__(self, name, *sub_params): - self.name = name - types = self.struct_types = OrderedDict() - values = self.struct_values = {} - for sub in sub_params: - if isinstance(sub, self.__class__): - types[sub.name] = "STRUCT" - values[sub.name] = sub - elif isinstance(sub, ArrayQueryParameter): - types[sub.name] = "ARRAY" - values[sub.name] = sub - else: - types[sub.name] = sub.type_ - values[sub.name] = sub.value - - @classmethod - def positional(cls, *sub_params): - """Factory for positional parameters. - - Args: - sub_params (Union[Tuple[ - google.cloud.bigquery.query.ScalarQueryParameter, - google.cloud.bigquery.query.ArrayQueryParameter, - google.cloud.bigquery.query.StructQueryParameter - ]]): the sub-parameters for the struct - - Returns: - google.cloud.bigquery.query.StructQueryParameter: instance without name - """ - return cls(None, *sub_params) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct parameter from JSON resource. - - Args: - resource (Dict): JSON mapping of parameter - - Returns: - google.cloud.bigquery.query.StructQueryParameter: instance - """ - name = resource.get("name") - instance = cls(name) - type_resources = {} - types = instance.struct_types - for item in resource["parameterType"]["structTypes"]: - types[item["name"]] = item["type"]["type"] - type_resources[item["name"]] = item["type"] - struct_values = resource["parameterValue"]["structValues"] - for key, value in struct_values.items(): - type_ = types[key] - converted = None - if type_ == "STRUCT": - struct_resource = { - "name": key, - "parameterType": type_resources[key], - "parameterValue": value, - } - converted = StructQueryParameter.from_api_repr(struct_resource) - elif type_ == "ARRAY": - struct_resource = { - "name": key, - "parameterType": type_resources[key], - "parameterValue": value, - } - converted = ArrayQueryParameter.from_api_repr(struct_resource) - else: - value = value["value"] - converted = _QUERY_PARAMS_FROM_JSON[type_](value, None) - instance.struct_values[key] = converted - return instance - - def to_api_repr(self): - """Construct JSON API representation for the parameter. - - Returns: - Dict: JSON mapping - """ - s_types = {} - values = {} - for name, value in self.struct_values.items(): - type_ = self.struct_types[name] - if type_ in ("STRUCT", "ARRAY"): - repr_ = value.to_api_repr() - s_types[name] = {"name": name, "type": repr_["parameterType"]} - values[name] = repr_["parameterValue"] - else: - s_types[name] = {"name": name, "type": {"type": type_}} - converter = _SCALAR_VALUE_TO_JSON_PARAM.get(type_) - if converter is not None: - value = converter(value) - values[name] = {"value": value} - - resource = { - "parameterType": { - "type": "STRUCT", - "structTypes": [s_types[key] for key in self.struct_types], - }, - "parameterValue": {"structValues": values}, - } - if self.name is not None: - resource["name"] = self.name - return resource - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple: The contents of this :class:`~google.cloud.bigquery.query.StructQueryParameter`.
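Combining the pieces, a struct parameter wrapping a scalar sub-parameter produces a nested resource along these lines (names and value are placeholders):

    sub = ScalarQueryParameter("x", "INT64", 7)
    param = StructQueryParameter("point", sub)
    param.to_api_repr()
    # {'parameterType': {'type': 'STRUCT',
    #                    'structTypes': [{'name': 'x', 'type': {'type': 'INT64'}}]},
    #  'parameterValue': {'structValues': {'x': {'value': '7'}}},
    #  'name': 'point'}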
- """ - return (self.name, self.struct_types, self.struct_values) - - def __eq__(self, other): - if not isinstance(other, StructQueryParameter): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __repr__(self): - return "StructQueryParameter{}".format(self._key()) - - -class _QueryResults(object): - """Results of a query. - - See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs/getQueryResults - """ - - def __init__(self, properties): - self._properties = {} - self._set_properties(properties) - - @classmethod - def from_api_repr(cls, api_response): - return cls(api_response) - - @property - def project(self): - """Project bound to the query job. - - Returns: - str: The project that the query job is associated with. - """ - return self._properties.get("jobReference", {}).get("projectId") - - @property - def cache_hit(self): - """Query results served from cache. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.cache_hit - - Returns: - Optional[bool]: - True if the query results were served from cache (None - until set by the server). - """ - return self._properties.get("cacheHit") - - @property - def complete(self): - """Server completed query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_complete - - Returns: - Optional[bool]: - True if the query completed on the server (None - until set by the server). - """ - return self._properties.get("jobComplete") - - @property - def errors(self): - """Errors generated by the query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.errors - - Returns: - Optional[List[Mapping]]: - Mappings describing errors generated on the server (None - until set by the server). - """ - return self._properties.get("errors") - - @property - def job_id(self): - """Job ID of the query job these results are from. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_reference - - Returns: - str: Job ID of the query job. - """ - return self._properties.get("jobReference", {}).get("jobId") - - @property - def page_token(self): - """Token for fetching next bach of results. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.page_token - - Returns: - Optional[str]: Token generated on the server (None until set by the server). - """ - return self._properties.get("pageToken") - - @property - def total_rows(self): - """Total number of rows returned by the query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_rows - - Returns: - Optional[int}: Count generated on the server (None until set by the server). - """ - total_rows = self._properties.get("totalRows") - if total_rows is not None: - return int(total_rows) - - @property - def total_bytes_processed(self): - """Total number of bytes processed by the query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_bytes_processed - - Returns: - Optional[int]: Count generated on the server (None until set by the server). 
- """ - total_bytes_processed = self._properties.get("totalBytesProcessed") - if total_bytes_processed is not None: - return int(total_bytes_processed) - - @property - def num_dml_affected_rows(self): - """Total number of rows affected by a DML query. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.num_dml_affected_rows - - Returns: - Optional[int]: Count generated on the server (None until set by the server). - """ - num_dml_affected_rows = self._properties.get("numDmlAffectedRows") - if num_dml_affected_rows is not None: - return int(num_dml_affected_rows) - - @property - def rows(self): - """Query results. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.rows - - Returns: - Optional[List[google.cloud.bigquery.table.Row]]: - Fields describing the schema (None until set by the server). - """ - return _rows_from_json(self._properties.get("rows", ()), self.schema) - - @property - def schema(self): - """Schema for query results. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.schema - - Returns: - Optional[List[SchemaField]]: - Fields describing the schema (None until set by the server). - """ - return _parse_schema_resource(self._properties.get("schema", {})) - - def _set_properties(self, api_response): - """Update properties from resource in body of ``api_response`` - - Args: - api_response (Dict): response returned from an API call - """ - job_id_present = ( - "jobReference" in api_response - and "jobId" in api_response["jobReference"] - and "projectId" in api_response["jobReference"] - ) - if not job_id_present: - raise ValueError("QueryResult requires a job reference") - - self._properties.clear() - self._properties.update(copy.deepcopy(api_response)) - - -def _query_param_from_api_repr(resource): - """Helper: construct concrete query parameter from JSON resource.""" - qp_type = resource["parameterType"] - if "arrayType" in qp_type: - klass = ArrayQueryParameter - elif "structTypes" in qp_type: - klass = StructQueryParameter - else: - klass = ScalarQueryParameter - return klass.from_api_repr(resource) diff --git a/bigquery/google/cloud/bigquery/retry.py b/bigquery/google/cloud/bigquery/retry.py deleted file mode 100644 index 4bc4b757f45d..000000000000 --- a/bigquery/google/cloud/bigquery/retry.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.api_core import exceptions -from google.api_core import retry - - -_RETRYABLE_REASONS = frozenset( - ["rateLimitExceeded", "backendError", "internalError", "badGateway"] -) - -_UNSTRUCTURED_RETRYABLE_TYPES = ( - exceptions.TooManyRequests, - exceptions.InternalServerError, - exceptions.BadGateway, -) - - -def _should_retry(exc): - """Predicate for determining when to retry. - - We retry if and only if the 'reason' is 'backendError' - or 'rateLimitExceeded'. 
- """ - if not hasattr(exc, "errors"): - return False - - if len(exc.errors) == 0: - # Check for unstructured error returns, e.g. from GFE - return isinstance(exc, _UNSTRUCTURED_RETRYABLE_TYPES) - - reason = exc.errors[0]["reason"] - return reason in _RETRYABLE_REASONS - - -DEFAULT_RETRY = retry.Retry(predicate=_should_retry) -"""The default retry object. - -Any method with a ``retry`` parameter will be retried automatically, -with reasonable defaults. To disable retry, pass ``retry=None``. -To modify the default retry behavior, call a ``with_XXX`` method -on ``DEFAULT_RETRY``. For example, to change the deadline to 30 seconds, -pass ``retry=bigquery.DEFAULT_RETRY.with_deadline(30)``. -""" diff --git a/bigquery/google/cloud/bigquery/routine.py b/bigquery/google/cloud/bigquery/routine.py deleted file mode 100644 index e99d9c6fa162..000000000000 --- a/bigquery/google/cloud/bigquery/routine.py +++ /dev/null @@ -1,518 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define resources for the BigQuery Routines API.""" - -from google.protobuf import json_format -import six - -import google.cloud._helpers -from google.cloud.bigquery import _helpers -import google.cloud.bigquery_v2.types - - -class Routine(object): - """Resource representing a user-defined routine. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines - - Args: - routine_ref (Union[str, google.cloud.bigquery.routine.RoutineReference]): - A pointer to a routine. If ``routine_ref`` is a string, it must - included a project ID, dataset ID, and routine ID, each separated - by ``.``. - ``**kwargs`` (Dict): - Initial property values. - """ - - _PROPERTY_TO_API_FIELD = { - "arguments": "arguments", - "body": "definitionBody", - "created": "creationTime", - "etag": "etag", - "imported_libraries": "importedLibraries", - "language": "language", - "modified": "lastModifiedTime", - "reference": "routineReference", - "return_type": "returnType", - "type_": "routineType", - "description": "description", - } - - def __init__(self, routine_ref, **kwargs): - if isinstance(routine_ref, six.string_types): - routine_ref = RoutineReference.from_string(routine_ref) - - self._properties = {"routineReference": routine_ref.to_api_repr()} - for property_name in kwargs: - setattr(self, property_name, kwargs[property_name]) - - @property - def reference(self): - """google.cloud.bigquery.routine.RoutineReference: Reference - describing the ID of this routine. 
- """ - return RoutineReference.from_api_repr( - self._properties[self._PROPERTY_TO_API_FIELD["reference"]] - ) - - @property - def path(self): - """str: URL path for the routine's APIs.""" - return self.reference.path - - @property - def project(self): - """str: ID of the project containing the routine.""" - return self.reference.project - - @property - def dataset_id(self): - """str: ID of dataset containing the routine.""" - return self.reference.dataset_id - - @property - def routine_id(self): - """str: The routine ID.""" - return self.reference.routine_id - - @property - def etag(self): - """str: ETag for the resource (:data:`None` until set from the - server). - - Read-only. - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["etag"]) - - @property - def type_(self): - """str: The fine-grained type of the routine. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#RoutineType - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["type_"]) - - @type_.setter - def type_(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["type_"]] = value - - @property - def created(self): - """Optional[datetime.datetime]: Datetime at which the routine was - created (:data:`None` until set from the server). - - Read-only. - """ - value = self._properties.get(self._PROPERTY_TO_API_FIELD["created"]) - if value is not None and value != 0: - # value will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(value) - ) - - @property - def modified(self): - """Optional[datetime.datetime]: Datetime at which the routine was - last modified (:data:`None` until set from the server). - - Read-only. - """ - value = self._properties.get(self._PROPERTY_TO_API_FIELD["modified"]) - if value is not None and value != 0: - # value will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(value) - ) - - @property - def language(self): - """Optional[str]: The language of the routine. - - Defaults to ``SQL``. - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["language"]) - - @language.setter - def language(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["language"]] = value - - @property - def arguments(self): - """List[google.cloud.bigquery.routine.RoutineArgument]: Input/output - argument of a function or a stored procedure. - - In-place modification is not supported. To set, replace the entire - property value with the modified list of - :class:`~google.cloud.bigquery.routine.RoutineArgument` objects. - """ - resources = self._properties.get(self._PROPERTY_TO_API_FIELD["arguments"], []) - return [RoutineArgument.from_api_repr(resource) for resource in resources] - - @arguments.setter - def arguments(self, value): - if not value: - resource = [] - else: - resource = [argument.to_api_repr() for argument in value] - self._properties[self._PROPERTY_TO_API_FIELD["arguments"]] = resource - - @property - def return_type(self): - """google.cloud.bigquery_v2.types.StandardSqlDataType: Return type of - the routine. - - If absent, the return type is inferred from - :attr:`~google.cloud.bigquery.routine.Routine.body` at query time in - each query that references this routine. If present, then the - evaluated result will be cast to the specified returned type at query - time. 
- - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine.FIELDS.return_type - """ - resource = self._properties.get(self._PROPERTY_TO_API_FIELD["return_type"]) - if not resource: - return resource - output = google.cloud.bigquery_v2.types.StandardSqlDataType() - output = json_format.ParseDict(resource, output, ignore_unknown_fields=True) - return output - - @return_type.setter - def return_type(self, value): - if value: - resource = json_format.MessageToDict(value) - else: - resource = None - self._properties[self._PROPERTY_TO_API_FIELD["return_type"]] = resource - - @property - def imported_libraries(self): - """List[str]: The path of the imported JavaScript libraries. - - The :attr:`~google.cloud.bigquery.routine.Routine.language` must - equal ``JAVASCRIPT``. - - Examples: - Set the ``imported_libraries`` to a list of Google Cloud Storage - URIs. - - .. code-block:: python - - routine = bigquery.Routine("proj.dataset.routine_id") - routine.imported_libraries = [ - "gs://cloud-samples-data/bigquery/udfs/max-value.js", - ] - """ - return self._properties.get( - self._PROPERTY_TO_API_FIELD["imported_libraries"], [] - ) - - @imported_libraries.setter - def imported_libraries(self, value): - if not value: - resource = [] - else: - resource = value - self._properties[self._PROPERTY_TO_API_FIELD["imported_libraries"]] = resource - - @property - def body(self): - """str: The body of the routine.""" - return self._properties.get(self._PROPERTY_TO_API_FIELD["body"]) - - @body.setter - def body(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["body"]] = value - - @property - def description(self): - """Optional[str]: Description of the routine (defaults to - :data:`None`). - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["description"]) - - @description.setter - def description(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["description"]] = value - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a routine given its API representation. - - Args: - resource (Dict[str, object]): - Resource, as returned from the API. - - Returns: - google.cloud.bigquery.routine.Routine: - Python object, as parsed from ``resource``. - """ - ref = cls(RoutineReference.from_api_repr(resource["routineReference"])) - ref._properties = resource - return ref - - def to_api_repr(self): - """Construct the API resource representation of this routine. - - Returns: - Dict[str, object]: Routine represented as an API resource. - """ - return self._properties - - def _build_resource(self, filter_fields): - """Generate a resource for ``update``.""" - return _helpers._build_resource_from_properties(self, filter_fields) - - def __repr__(self): - return "Routine('{}.{}.{}')".format( - self.project, self.dataset_id, self.routine_id - ) - - -class RoutineArgument(object): - """Input/output argument of a function or a stored procedure. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#argument - - Args: - ``**kwargs`` (Dict): - Initial property values. - """ - - _PROPERTY_TO_API_FIELD = { - "data_type": "dataType", - "kind": "argumentKind", - # Even though it's not necessary for field mapping to map when the - # property name equals the resource name, we add these here so that we - # have an exhaustive list of all properties.
- "name": "name", - "mode": "mode", - } - - def __init__(self, **kwargs): - self._properties = {} - for property_name in kwargs: - setattr(self, property_name, kwargs[property_name]) - - @property - def name(self): - """Optional[str]: Name of this argument. - - Can be absent for function return argument. - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["name"]) - - @name.setter - def name(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["name"]] = value - - @property - def kind(self): - """Optional[str]: The kind of argument, for example ``FIXED_TYPE`` or - ``ANY_TYPE``. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.argument_kind - """ - return self._properties.get(self._PROPERTY_TO_API_FIELD["kind"]) - - @kind.setter - def kind(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["kind"]] = value - - @property - def mode(self): - """Optional[str]: The input/output mode of the argument.""" - return self._properties.get(self._PROPERTY_TO_API_FIELD["mode"]) - - @mode.setter - def mode(self, value): - self._properties[self._PROPERTY_TO_API_FIELD["mode"]] = value - - @property - def data_type(self): - """Optional[google.cloud.bigquery_v2.types.StandardSqlDataType]: Type - of a variable, e.g., a function argument. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.data_type - """ - resource = self._properties.get(self._PROPERTY_TO_API_FIELD["data_type"]) - if not resource: - return resource - output = google.cloud.bigquery_v2.types.StandardSqlDataType() - output = json_format.ParseDict(resource, output, ignore_unknown_fields=True) - return output - - @data_type.setter - def data_type(self, value): - if value: - resource = json_format.MessageToDict(value) - else: - resource = None - self._properties[self._PROPERTY_TO_API_FIELD["data_type"]] = resource - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a routine argument given its API representation. - - Args: - resource (Dict[str, object]): Resource, as returned from the API. - - Returns: - google.cloud.bigquery.routine.RoutineArgument: - Python object, as parsed from ``resource``. - """ - ref = cls() - ref._properties = resource - return ref - - def to_api_repr(self): - """Construct the API resource representation of this routine argument. - - Returns: - Dict[str, object]: Routine argument represented as an API resource. - """ - return self._properties - - def __eq__(self, other): - if not isinstance(other, RoutineArgument): - return NotImplemented - return self._properties == other._properties - - def __ne__(self, other): - return not self == other - - def __repr__(self): - all_properties = [ - "{}={}".format(property_name, repr(getattr(self, property_name))) - for property_name in sorted(self._PROPERTY_TO_API_FIELD) - ] - return "RoutineArgument({})".format(", ".join(all_properties)) - - -class RoutineReference(object): - """A pointer to a routine. 
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#routinereference - """ - - def __init__(self): - self._properties = {} - - @property - def project(self): - """str: ID of the project containing the routine.""" - return self._properties["projectId"] - - @property - def dataset_id(self): - """str: ID of dataset containing the routine.""" - return self._properties["datasetId"] - - @property - def routine_id(self): - """str: The routine ID.""" - return self._properties["routineId"] - - @property - def path(self): - """str: URL path for the routine's APIs.""" - return "/projects/%s/datasets/%s/routines/%s" % ( - self.project, - self.dataset_id, - self.routine_id, - ) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a routine reference given its API representation. - - Args: - resource (Dict[str, object]): - Routine reference representation returned from the API. - - Returns: - google.cloud.bigquery.routine.RoutineReference: - Routine reference parsed from ``resource``. - """ - ref = cls() - ref._properties = resource - return ref - - @classmethod - def from_string(cls, routine_id, default_project=None): - """Factory: construct a routine reference from routine ID string. - - Args: - routine_id (str): - A routine ID in standard SQL format. If ``default_project`` - is not specified, this must include a project ID, dataset - ID, and routine ID, each separated by ``.``. - default_project (str): - Optional. The project ID to use when ``routine_id`` does not - include a project ID. - - Returns: - google.cloud.bigquery.routine.RoutineReference: - Routine reference parsed from ``routine_id``. - - Raises: - ValueError: - If ``routine_id`` is not a fully-qualified routine ID in - standard SQL format. - """ - proj, dset, routine = _helpers._parse_3_part_id( - routine_id, default_project=default_project, property_name="routine_id" - ) - return cls.from_api_repr( - {"projectId": proj, "datasetId": dset, "routineId": routine} - ) - - def to_api_repr(self): - """Construct the API resource representation of this routine reference. - - Returns: - Dict[str, object]: Routine reference represented as an API resource. - """ - return self._properties - - def __eq__(self, other): - """Two RoutineReferences are equal if they point to the same routine.""" - if not isinstance(other, RoutineReference): - return NotImplemented - return str(self) == str(other) - - def __hash__(self): - return hash(str(self)) - - def __ne__(self, other): - return not self == other - - def __repr__(self): - return "RoutineReference.from_string('{}')".format(str(self)) - - def __str__(self): - """String representation of the reference. - - This is a fully-qualified ID, including the project ID and dataset ID. - """ - return "{}.{}.{}".format(self.project, self.dataset_id, self.routine_id) diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py deleted file mode 100644 index 3878a80a9f94..000000000000 --- a/bigquery/google/cloud/bigquery/schema.py +++ /dev/null @@ -1,293 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Schemas for BigQuery tables / queries.""" - -from six.moves import collections_abc - -from google.cloud.bigquery_v2 import types - - -_STRUCT_TYPES = ("RECORD", "STRUCT") - -# SQL types reference: -# https://cloud.google.com/bigquery/data-types#legacy_sql_data_types -# https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types -LEGACY_TO_STANDARD_TYPES = { - "STRING": types.StandardSqlDataType.STRING, - "BYTES": types.StandardSqlDataType.BYTES, - "INTEGER": types.StandardSqlDataType.INT64, - "INT64": types.StandardSqlDataType.INT64, - "FLOAT": types.StandardSqlDataType.FLOAT64, - "FLOAT64": types.StandardSqlDataType.FLOAT64, - "NUMERIC": types.StandardSqlDataType.NUMERIC, - "BOOLEAN": types.StandardSqlDataType.BOOL, - "BOOL": types.StandardSqlDataType.BOOL, - "GEOGRAPHY": types.StandardSqlDataType.GEOGRAPHY, - "RECORD": types.StandardSqlDataType.STRUCT, - "STRUCT": types.StandardSqlDataType.STRUCT, - "TIMESTAMP": types.StandardSqlDataType.TIMESTAMP, - "DATE": types.StandardSqlDataType.DATE, - "TIME": types.StandardSqlDataType.TIME, - "DATETIME": types.StandardSqlDataType.DATETIME, - # no direct conversion from ARRAY, the latter is represented by mode="REPEATED" -} -"""String names of the legacy SQL types to integer codes of Standard SQL types.""" - - -class SchemaField(object): - """Describe a single field within a table schema. - - Args: - name (str): the name of the field. - - field_type (str): the type of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type - - mode (str): the mode of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode - - description (Optional[str]): description for the field. - - fields (Tuple[google.cloud.bigquery.schema.SchemaField]): - subfields (requires ``field_type`` of 'RECORD'). - """ - - def __init__(self, name, field_type, mode="NULLABLE", description=None, fields=()): - self._name = name - self._field_type = field_type - self._mode = mode - self._description = description - self._fields = tuple(fields) - - @classmethod - def from_api_repr(cls, api_repr): - """Return a ``SchemaField`` object deserialized from a dictionary. - - Args: - api_repr (Mapping[str, str]): The serialized representation - of the SchemaField, such as what is output by - :meth:`to_api_repr`. - - Returns: - google.cloud.bigquery.schema.SchemaField: The ``SchemaField`` object. - """ - # Handle optional properties with default values - mode = api_repr.get("mode", "NULLABLE") - description = api_repr.get("description") - fields = api_repr.get("fields", ()) - return cls( - field_type=api_repr["type"].upper(), - fields=[cls.from_api_repr(f) for f in fields], - mode=mode.upper(), - description=description, - name=api_repr["name"], - ) - - @property - def name(self): - """str: The name of the field.""" - return self._name - - @property - def field_type(self): - """str: The type of the field. - - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type - """ - return self._field_type - - @property - def mode(self): - """str: The mode of the field.
- - See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode - """ - return self._mode - - @property - def is_nullable(self): - """bool: whether 'mode' is 'nullable'.""" - return self._mode == "NULLABLE" - - @property - def description(self): - """Optional[str]: description for the field.""" - return self._description - - @property - def fields(self): - """tuple: Subfields contained in this field. - - Must be empty if ``field_type`` is not 'RECORD'. - """ - return self._fields - - def to_api_repr(self): - """Return a dictionary representing this schema field. - - Returns: - Dict: A dictionary representing the SchemaField in a serialized form. - """ - # Put together the basic representation. See http://bit.ly/2hOAT5u. - answer = { - "mode": self.mode.upper(), - "name": self.name, - "type": self.field_type.upper(), - "description": self.description, - } - - # If this is a RECORD type, then sub-fields are also included, - # add this to the serialized representation. - if self.field_type.upper() in _STRUCT_TYPES: - answer["fields"] = [f.to_api_repr() for f in self.fields] - - # Done; return the serialized dictionary. - return answer - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`. - """ - return ( - self._name, - self._field_type.upper(), - self._mode.upper(), - self._description, - self._fields, - ) - - def to_standard_sql(self): - """Return the field as the standard SQL field representation object. - - Returns: - An instance of :class:`~google.cloud.bigquery_v2.types.StandardSqlField`. - """ - sql_type = types.StandardSqlDataType() - - if self.mode == "REPEATED": - sql_type.type_kind = types.StandardSqlDataType.ARRAY - else: - sql_type.type_kind = LEGACY_TO_STANDARD_TYPES.get( - self.field_type, types.StandardSqlDataType.TYPE_KIND_UNSPECIFIED - ) - - if sql_type.type_kind == types.StandardSqlDataType.ARRAY: # noqa: E721 - array_element_type = LEGACY_TO_STANDARD_TYPES.get( - self.field_type, types.StandardSqlDataType.TYPE_KIND_UNSPECIFIED - ) - sql_type.array_element_type.type_kind = array_element_type - - # ARRAY cannot directly contain other arrays, only scalar types and STRUCTs - # https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#array-type - if array_element_type == types.StandardSqlDataType.STRUCT: # noqa: E721 - sql_type.array_element_type.struct_type.fields.extend( - field.to_standard_sql() for field in self.fields - ) - - elif sql_type.type_kind == types.StandardSqlDataType.STRUCT: # noqa: E721 - sql_type.struct_type.fields.extend( - field.to_standard_sql() for field in self.fields - ) - - return types.StandardSqlField(name=self.name, type=sql_type) - - def __eq__(self, other): - if not isinstance(other, SchemaField): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self._key()) - - def __repr__(self): - return "SchemaField{}".format(self._key()) - - -def _parse_schema_resource(info): - """Parse a resource fragment into a schema field. - - Args: - info (Mapping[str, Dict]): should contain a "fields" key to be parsed - - Returns: - Sequence[google.cloud.bigquery.schema.SchemaField]: - A list of parsed fields; an empty list if no "fields" key is found.
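The `to_standard_sql` logic above maps legacy type names plus the REPEATED mode onto standard SQL type kinds. Below is a standalone sketch of that rule, using plain strings instead of the `bigquery_v2` proto enums so it runs without the generated types; the mapping shown is a subset of `LEGACY_TO_STANDARD_TYPES`:

```python
# A trimmed-down stand-in for LEGACY_TO_STANDARD_TYPES above.
LEGACY_TO_STANDARD = {"STRING": "STRING", "INTEGER": "INT64", "FLOAT": "FLOAT64", "RECORD": "STRUCT"}

def standard_sql_kind(field_type, mode):
    """Return (type_kind, array_element_kind or None) for a legacy field."""
    kind = LEGACY_TO_STANDARD.get(field_type.upper(), "TYPE_KIND_UNSPECIFIED")
    if mode.upper() == "REPEATED":
        # REPEATED fields surface as ARRAY<element type> in standard SQL.
        return "ARRAY", kind
    return kind, None

assert standard_sql_kind("STRING", "REPEATED") == ("ARRAY", "STRING")
assert standard_sql_kind("INTEGER", "NULLABLE") == ("INT64", None)
assert standard_sql_kind("RECORD", "REPEATED") == ("ARRAY", "STRUCT")
```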
- """ - if "fields" not in info: - return () - - schema = [] - for r_field in info["fields"]: - name = r_field["name"] - field_type = r_field["type"] - mode = r_field.get("mode", "NULLABLE") - description = r_field.get("description") - sub_fields = _parse_schema_resource(r_field) - schema.append(SchemaField(name, field_type, mode, description, sub_fields)) - return schema - - -def _build_schema_resource(fields): - """Generate a resource fragment for a schema. - - Args: - fields (Sequence[google.cloud.bigquery.schema.SchemaField): schema to be dumped. - - Returns: - Sequence[Dict]: Mappings describing the schema of the supplied fields. - """ - return [field.to_api_repr() for field in fields] - - -def _to_schema_fields(schema): - """Coerce `schema` to a list of schema field instances. - - Args: - schema(Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): - Table schema to convert. If some items are passed as mappings, - their content must be compatible with - :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. - - Returns: - Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`] - - Raises: - Exception: If ``schema`` is not a sequence, or if any item in the - sequence is not a :class:`~google.cloud.bigquery.schema.SchemaField` - instance or a compatible mapping representation of the field. - """ - for field in schema: - if not isinstance(field, (SchemaField, collections_abc.Mapping)): - raise ValueError( - "Schema items must either be fields or compatible " - "mapping representations." - ) - - return [ - field if isinstance(field, SchemaField) else SchemaField.from_api_repr(field) - for field in schema - ] diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py deleted file mode 100644 index 555f529f3670..000000000000 --- a/bigquery/google/cloud/bigquery/table.py +++ /dev/null @@ -1,2221 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Tables.""" - -from __future__ import absolute_import - -import copy -import datetime -import functools -import logging -import operator -import warnings - -import six - -try: - from google.cloud import bigquery_storage_v1beta1 -except ImportError: # pragma: NO COVER - bigquery_storage_v1beta1 = None - -try: - import pandas -except ImportError: # pragma: NO COVER - pandas = None - -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - -try: - import tqdm -except ImportError: # pragma: NO COVER - tqdm = None - -import google.api_core.exceptions -from google.api_core.page_iterator import HTTPIterator - -import google.cloud._helpers -from google.cloud.bigquery import _helpers -from google.cloud.bigquery import _pandas_helpers -from google.cloud.bigquery.schema import _build_schema_resource -from google.cloud.bigquery.schema import _parse_schema_resource -from google.cloud.bigquery.schema import _to_schema_fields -from google.cloud.bigquery.external_config import ExternalConfig -from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration - - -_LOGGER = logging.getLogger(__name__) - -_NO_BQSTORAGE_ERROR = ( - "The google-cloud-bigquery-storage library is not installed, " - "please install google-cloud-bigquery-storage to use bqstorage features." -) -_NO_PANDAS_ERROR = ( - "The pandas library is not installed, please install " - "pandas to use the to_dataframe() function." -) -_NO_PYARROW_ERROR = ( - "The pyarrow library is not installed, please install " - "pyarrow to use the to_arrow() function." -) -_NO_TQDM_ERROR = ( - "A progress bar was requested, but there was an error loading the tqdm " - "library. Please install tqdm to use the progress bar functionality." -) -_TABLE_HAS_NO_SCHEMA = 'Table has no schema: call "client.get_table()"' - - -def _reference_getter(table): - """A :class:`~google.cloud.bigquery.table.TableReference` pointing to - this table. - - Returns: - google.cloud.bigquery.table.TableReference: pointer to this table. - """ - from google.cloud.bigquery import dataset - - dataset_ref = dataset.DatasetReference(table.project, table.dataset_id) - return TableReference(dataset_ref, table.table_id) - - -def _view_use_legacy_sql_getter(table): - """bool: Specifies whether to execute the view with Legacy or Standard SQL. - - This boolean specifies whether to execute the view with Legacy SQL - (:data:`True`) or Standard SQL (:data:`False`). The client side default is - :data:`False`. The server-side default is :data:`True`. If this table is - not a view, :data:`None` is returned. - - Raises: - ValueError: For invalid value types. - """ - view = table._properties.get("view") - if view is not None: - # The server-side default for useLegacySql is True. - return view.get("useLegacySql", True) - # In some cases, such as in a table list no view object is present, but the - # resource still represents a view. Use the type as a fallback. - if table.table_type == "VIEW": - # The server-side default for useLegacySql is True. - return True - - -class TableReference(object): - """TableReferences are pointers to tables. 
- - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#tablereference - - Args: - dataset_ref (google.cloud.bigquery.dataset.DatasetReference): - A pointer to the dataset - table_id (str): The ID of the table - """ - - def __init__(self, dataset_ref, table_id): - self._project = dataset_ref.project - self._dataset_id = dataset_ref.dataset_id - self._table_id = table_id - - @property - def project(self): - """str: Project bound to the table""" - return self._project - - @property - def dataset_id(self): - """str: ID of dataset containing the table.""" - return self._dataset_id - - @property - def table_id(self): - """str: The table ID.""" - return self._table_id - - @property - def path(self): - """str: URL path for the table's APIs.""" - return "/projects/%s/datasets/%s/tables/%s" % ( - self._project, - self._dataset_id, - self._table_id, - ) - - @classmethod - def from_string(cls, table_id, default_project=None): - """Construct a table reference from table ID string. - - Args: - table_id (str): - A table ID in standard SQL format. If ``default_project`` - is not specified, this must include a project ID, dataset - ID, and table ID, each separated by ``.``. - default_project (str): - Optional. The project ID to use when ``table_id`` does not - include a project ID. - - Returns: - TableReference: Table reference parsed from ``table_id``. - - Examples: - >>> TableReference.from_string('my-project.mydataset.mytable') - TableRef...(DatasetRef...('my-project', 'mydataset'), 'mytable') - - Raises: - ValueError: - If ``table_id`` is not a fully-qualified table ID in - standard SQL format. - """ - from google.cloud.bigquery.dataset import DatasetReference - - ( - output_project_id, - output_dataset_id, - output_table_id, - ) = _helpers._parse_3_part_id( - table_id, default_project=default_project, property_name="table_id" - ) - - return cls( - DatasetReference(output_project_id, output_dataset_id), output_table_id - ) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a table reference given its API representation - - Args: - resource (Dict[str, object]): - Table reference representation returned from the API - - Returns: - google.cloud.bigquery.table.TableReference: - Table reference parsed from ``resource``. - """ - from google.cloud.bigquery.dataset import DatasetReference - - project = resource["projectId"] - dataset_id = resource["datasetId"] - table_id = resource["tableId"] - return cls(DatasetReference(project, dataset_id), table_id) - - def to_api_repr(self): - """Construct the API resource representation of this table reference. - - Returns: - Dict[str, object]: Table reference represented as an API resource - """ - return { - "projectId": self._project, - "datasetId": self._dataset_id, - "tableId": self._table_id, - } - - def to_bqstorage(self): - """Construct a BigQuery Storage API representation of this table. - - Install the ``google-cloud-bigquery-storage`` package to use this - feature. - - If the ``table_id`` contains a partition identifier (e.g. - ``my_table$201812``) or a snapshot identifier (e.g. - ``mytable@1234567890``), it is ignored. Use - :class:`google.cloud.bigquery_storage_v1beta1.types.TableReadOptions` - to filter rows by partition. Use - :class:`google.cloud.bigquery_storage_v1beta1.types.TableModifiers` - to select a specific snapshot to read from. - - Returns: - google.cloud.bigquery_storage_v1beta1.types.TableReference: - A reference to this table in the BigQuery Storage API.
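A hedged usage sketch for `TableReference.from_string` as defined above; the project, dataset, and table names are placeholders, and google-cloud-bigquery 1.x is assumed:

```python
from google.cloud.bigquery.table import TableReference

ref = TableReference.from_string("my-project.mydataset.mytable")
assert ref.path == "/projects/my-project/datasets/mydataset/tables/mytable"

# A two-part ID resolves against default_project, yielding an equal reference.
ref2 = TableReference.from_string("mydataset.mytable", default_project="my-project")
assert ref2 == ref

# The REST resource round-trips through to_api_repr()/from_api_repr().
assert TableReference.from_api_repr(ref.to_api_repr()) == ref
```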
- - Raises: - ValueError: - If the :mod:`google.cloud.bigquery_storage_v1beta1` module - cannot be imported. - """ - if bigquery_storage_v1beta1 is None: - raise ValueError(_NO_BQSTORAGE_ERROR) - - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = self._project - table_ref.dataset_id = self._dataset_id - table_id = self._table_id - - if "@" in table_id: - table_id = table_id.split("@")[0] - - if "$" in table_id: - table_id = table_id.split("$")[0] - - table_ref.table_id = table_id - - return table_ref - - def _key(self): - """A tuple key that uniquely describes this field. - - Used to compute this instance's hashcode and evaluate equality. - - Returns: - Tuple[str]: The contents of this :class:`TableReference`. - """ - return (self._project, self._dataset_id, self._table_id) - - def __eq__(self, other): - if not isinstance(other, TableReference): - return NotImplemented - return self._key() == other._key() - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self._key()) - - def __repr__(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset_ref = DatasetReference(self._project, self._dataset_id) - return "TableReference({}, '{}')".format(repr(dataset_ref), self._table_id) - - -class Table(object): - """Tables represent a set of rows whose values correspond to a schema. - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource-table - - Args: - table_ref (Union[google.cloud.bigquery.table.TableReference, str]): - A pointer to a table. If ``table_ref`` is a string, it must - include a project ID, dataset ID, and table ID, each separated - by ``.``. - schema (Optional[Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]]): - The table's schema. If any item is a mapping, its content must be - compatible with - :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. - """ - - _PROPERTY_TO_API_FIELD = { - "friendly_name": "friendlyName", - "expires": "expirationTime", - "time_partitioning": "timePartitioning", - "partitioning_type": "timePartitioning", - "partition_expiration": "timePartitioning", - "view_use_legacy_sql": "view", - "view_query": "view", - "external_data_configuration": "externalDataConfiguration", - "encryption_configuration": "encryptionConfiguration", - "require_partition_filter": "requirePartitionFilter", - } - - def __init__(self, table_ref, schema=None): - table_ref = _table_arg_to_table_ref(table_ref) - self._properties = {"tableReference": table_ref.to_api_repr(), "labels": {}} - # Let the @property do validation. - if schema is not None: - self.schema = schema - - @property - def project(self): - """str: Project bound to the table.""" - return self._properties["tableReference"]["projectId"] - - @property - def dataset_id(self): - """str: ID of dataset containing the table.""" - return self._properties["tableReference"]["datasetId"] - - @property - def table_id(self): - """str: ID of the table.""" - return self._properties["tableReference"]["tableId"] - - reference = property(_reference_getter) - - @property - def path(self): - """str: URL path for the table's APIs.""" - return "/projects/%s/datasets/%s/tables/%s" % ( - self.project, - self.dataset_id, - self.table_id, - ) - - @property - def require_partition_filter(self): - """bool: If set to true, queries over the partitioned table must - specify a partition filter that can be used for partition - elimination.
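The `to_bqstorage` method above silently strips partition and snapshot decorators before handing the ID to the Storage API. The same rule in isolation, as a runnable sketch with no library dependency:

```python
# Standalone sketch of the decorator-stripping rule used by to_bqstorage()
# above: partition ($) and snapshot (@) decorators are dropped, because the
# BigQuery Storage API addresses the base table only.
def strip_table_decorators(table_id):
    table_id = table_id.split("@")[0]  # snapshot decorator, e.g. mytable@1234567890
    table_id = table_id.split("$")[0]  # partition decorator, e.g. my_table$201812
    return table_id

assert strip_table_decorators("my_table$201812") == "my_table"
assert strip_table_decorators("mytable@1234567890") == "mytable"
assert strip_table_decorators("plain_table") == "plain_table"
```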
- """ - return self._properties.get("requirePartitionFilter") - - @require_partition_filter.setter - def require_partition_filter(self, value): - self._properties["requirePartitionFilter"] = value - - @property - def schema(self): - """Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]: - Table's schema. - - Raises: - Exception: - If ``schema`` is not a sequence, or if any item in the sequence - is not a :class:`~google.cloud.bigquery.schema.SchemaField` - instance or a compatible mapping representation of the field. - """ - prop = self._properties.get("schema") - if not prop: - return [] - else: - return _parse_schema_resource(prop) - - @schema.setter - def schema(self, value): - if value is None: - self._properties["schema"] = None - else: - value = _to_schema_fields(value) - self._properties["schema"] = {"fields": _build_schema_resource(value)} - - @property - def labels(self): - """Dict[str, str]: Labels for the table. - - This method always returns a dict. To change a table's labels, - modify the dict, then call ``Client.update_table``. To delete a - label, set its value to :data:`None` before updating. - - Raises: - ValueError: If ``value`` type is invalid. - """ - return self._properties.setdefault("labels", {}) - - @labels.setter - def labels(self, value): - if not isinstance(value, dict): - raise ValueError("Pass a dict") - self._properties["labels"] = value - - @property - def encryption_configuration(self): - """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom - encryption configuration for the table. - - Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` - if using default encryption. - - See `protecting data with Cloud KMS keys - `_ - in the BigQuery documentation. - """ - prop = self._properties.get("encryptionConfiguration") - if prop is not None: - prop = EncryptionConfiguration.from_api_repr(prop) - return prop - - @encryption_configuration.setter - def encryption_configuration(self, value): - api_repr = value - if value is not None: - api_repr = value.to_api_repr() - self._properties["encryptionConfiguration"] = api_repr - - @property - def created(self): - """Union[datetime.datetime, None]: Datetime at which the table was - created (:data:`None` until set from the server). - """ - creation_time = self._properties.get("creationTime") - if creation_time is not None: - # creation_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(creation_time) - ) - - @property - def etag(self): - """Union[str, None]: ETag for the table resource (:data:`None` until - set from the server). - """ - return self._properties.get("etag") - - @property - def modified(self): - """Union[datetime.datetime, None]: Datetime at which the table was last - modified (:data:`None` until set from the server). - """ - modified_time = self._properties.get("lastModifiedTime") - if modified_time is not None: - # modified_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(modified_time) - ) - - @property - def num_bytes(self): - """Union[int, None]: The size of the table in bytes (:data:`None` until - set from the server). - """ - return _helpers._int_or_none(self._properties.get("numBytes")) - - @property - def num_rows(self): - """Union[int, None]: The number of rows in the table (:data:`None` - until set from the server). 
- """ - return _helpers._int_or_none(self._properties.get("numRows")) - - @property - def self_link(self): - """Union[str, None]: URL for the table resource (:data:`None` until set - from the server). - """ - return self._properties.get("selfLink") - - @property - def full_table_id(self): - """Union[str, None]: ID for the table (:data:`None` until set from the - server). - - In the format ``project_id:dataset_id.table_id``. - """ - return self._properties.get("id") - - @property - def table_type(self): - """Union[str, None]: The type of the table (:data:`None` until set from - the server). - - Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``. - """ - return self._properties.get("type") - - @property - def range_partitioning(self): - """Optional[google.cloud.bigquery.table.RangePartitioning]: - Configures range-based partitioning for a table. - - .. note:: - **Beta**. The integer range partitioning feature is in a - pre-release state and might change or have limited support. - - Only specify at most one of - :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or - :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. - - Raises: - ValueError: - If the value is not - :class:`~google.cloud.bigquery.table.RangePartitioning` or - :data:`None`. - """ - resource = self._properties.get("rangePartitioning") - if resource is not None: - return RangePartitioning(_properties=resource) - - @range_partitioning.setter - def range_partitioning(self, value): - resource = value - if isinstance(value, RangePartitioning): - resource = value._properties - elif value is not None: - raise ValueError( - "Expected value to be RangePartitioning or None, got {}.".format(value) - ) - self._properties["rangePartitioning"] = resource - - @property - def time_partitioning(self): - """Optional[google.cloud.bigquery.table.TimePartitioning]: Configures time-based - partitioning for a table. - - Only specify at most one of - :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or - :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. - - Raises: - ValueError: - If the value is not - :class:`~google.cloud.bigquery.table.TimePartitioning` or - :data:`None`. - """ - prop = self._properties.get("timePartitioning") - if prop is not None: - return TimePartitioning.from_api_repr(prop) - - @time_partitioning.setter - def time_partitioning(self, value): - api_repr = value - if isinstance(value, TimePartitioning): - api_repr = value.to_api_repr() - elif value is not None: - raise ValueError( - "value must be google.cloud.bigquery.table.TimePartitioning " "or None" - ) - self._properties["timePartitioning"] = api_repr - - @property - def partitioning_type(self): - """Union[str, None]: Time partitioning of the table if it is - partitioned (Defaults to :data:`None`). - - The only partitioning type that is currently supported is - :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`. - """ - warnings.warn( - "This method will be deprecated in future versions. Please use " - "Table.time_partitioning.type_ instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is not None: - return self.time_partitioning.type_ - - @partitioning_type.setter - def partitioning_type(self, value): - warnings.warn( - "This method will be deprecated in future versions. 
Please use " - "Table.time_partitioning.type_ instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is None: - self._properties["timePartitioning"] = {} - self._properties["timePartitioning"]["type"] = value - - @property - def partition_expiration(self): - """Union[int, None]: Expiration time in milliseconds for a partition. - - If :attr:`partition_expiration` is set and :attr:`type_` is - not set, :attr:`type_` will default to - :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`. - """ - warnings.warn( - "This method will be deprecated in future versions. Please use " - "Table.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is not None: - return self.time_partitioning.expiration_ms - - @partition_expiration.setter - def partition_expiration(self, value): - warnings.warn( - "This method will be deprecated in future versions. Please use " - "Table.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is None: - self._properties["timePartitioning"] = {"type": TimePartitioningType.DAY} - self._properties["timePartitioning"]["expirationMs"] = str(value) - - @property - def clustering_fields(self): - """Union[List[str], None]: Fields defining clustering for the table - - (Defaults to :data:`None`). - - Clustering fields are immutable after table creation. - - .. note:: - - As of 2018-06-29, clustering fields cannot be set on a table - which does not also have time partioning defined. - """ - prop = self._properties.get("clustering") - if prop is not None: - return list(prop.get("fields", ())) - - @clustering_fields.setter - def clustering_fields(self, value): - """Union[List[str], None]: Fields defining clustering for the table - - (Defaults to :data:`None`). - """ - if value is not None: - prop = self._properties.setdefault("clustering", {}) - prop["fields"] = value - else: - if "clustering" in self._properties: - del self._properties["clustering"] - - @property - def description(self): - """Union[str, None]: Description of the table (defaults to - :data:`None`). - - Raises: - ValueError: For invalid value types. - """ - return self._properties.get("description") - - @description.setter - def description(self, value): - if not isinstance(value, six.string_types) and value is not None: - raise ValueError("Pass a string, or None") - self._properties["description"] = value - - @property - def expires(self): - """Union[datetime.datetime, None]: Datetime at which the table will be - deleted. - - Raises: - ValueError: For invalid value types. - """ - expiration_time = self._properties.get("expirationTime") - if expiration_time is not None: - # expiration_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(expiration_time) - ) - - @expires.setter - def expires(self, value): - if not isinstance(value, datetime.datetime) and value is not None: - raise ValueError("Pass a datetime, or None") - value_ms = google.cloud._helpers._millis_from_datetime(value) - self._properties["expirationTime"] = _helpers._str_or_none(value_ms) - - @property - def friendly_name(self): - """Union[str, None]: Title of the table (defaults to :data:`None`). - - Raises: - ValueError: For invalid value types. 
- """ - return self._properties.get("friendlyName") - - @friendly_name.setter - def friendly_name(self, value): - if not isinstance(value, six.string_types) and value is not None: - raise ValueError("Pass a string, or None") - self._properties["friendlyName"] = value - - @property - def location(self): - """Union[str, None]: Location in which the table is hosted - - Defaults to :data:`None`. - """ - return self._properties.get("location") - - @property - def view_query(self): - """Union[str, None]: SQL query defining the table as a view (defaults - to :data:`None`). - - By default, the query is treated as Standard SQL. To use Legacy - SQL, set :attr:`view_use_legacy_sql` to :data:`True`. - - Raises: - ValueError: For invalid value types. - """ - view = self._properties.get("view") - if view is not None: - return view.get("query") - - @view_query.setter - def view_query(self, value): - if not isinstance(value, six.string_types): - raise ValueError("Pass a string") - view = self._properties.get("view") - if view is None: - view = self._properties["view"] = {} - view["query"] = value - # The service defaults useLegacySql to True, but this - # client uses Standard SQL by default. - if view.get("useLegacySql") is None: - view["useLegacySql"] = False - - @view_query.deleter - def view_query(self): - """Delete SQL query defining the table as a view.""" - self._properties.pop("view", None) - - view_use_legacy_sql = property(_view_use_legacy_sql_getter) - - @view_use_legacy_sql.setter - def view_use_legacy_sql(self, value): - if not isinstance(value, bool): - raise ValueError("Pass a boolean") - if self._properties.get("view") is None: - self._properties["view"] = {} - self._properties["view"]["useLegacySql"] = value - - @property - def streaming_buffer(self): - """google.cloud.bigquery.StreamingBuffer: Information about a table's - streaming buffer. - """ - sb = self._properties.get("streamingBuffer") - if sb is not None: - return StreamingBuffer(sb) - - @property - def external_data_configuration(self): - """Union[google.cloud.bigquery.ExternalConfig, None]: Configuration for - an external data source (defaults to :data:`None`). - - Raises: - ValueError: For invalid value types. - """ - prop = self._properties.get("externalDataConfiguration") - if prop is not None: - prop = ExternalConfig.from_api_repr(prop) - return prop - - @external_data_configuration.setter - def external_data_configuration(self, value): - if not (value is None or isinstance(value, ExternalConfig)): - raise ValueError("Pass an ExternalConfig or None") - api_repr = value - if value is not None: - api_repr = value.to_api_repr() - self._properties["externalDataConfiguration"] = api_repr - - @classmethod - def from_string(cls, full_table_id): - """Construct a table from fully-qualified table ID. - - Args: - full_table_id (str): - A fully-qualified table ID in standard SQL format. Must - included a project ID, dataset ID, and table ID, each - separated by ``.``. - - Returns: - Table: Table parsed from ``full_table_id``. - - Examples: - >>> Table.from_string('my-project.mydataset.mytable') - Table(TableRef...(D...('my-project', 'mydataset'), 'mytable')) - - Raises: - ValueError: - If ``full_table_id`` is not a fully-qualified table ID in - standard SQL format. 
- """ - return cls(TableReference.from_string(full_table_id)) - - @classmethod - def from_api_repr(cls, resource): - """Factory: construct a table given its API representation - - Args: - resource (Dict[str, object]): - Table resource representation from the API - - Returns: - google.cloud.bigquery.table.Table: Table parsed from ``resource``. - - Raises: - KeyError: - If the ``resource`` lacks the key ``'tableReference'``, or if - the ``dict`` stored within the key ``'tableReference'`` lacks - the keys ``'tableId'``, ``'projectId'``, or ``'datasetId'``. - """ - from google.cloud.bigquery import dataset - - if ( - "tableReference" not in resource - or "tableId" not in resource["tableReference"] - ): - raise KeyError( - "Resource lacks required identity information:" - '["tableReference"]["tableId"]' - ) - project_id = resource["tableReference"]["projectId"] - table_id = resource["tableReference"]["tableId"] - dataset_id = resource["tableReference"]["datasetId"] - dataset_ref = dataset.DatasetReference(project_id, dataset_id) - - table = cls(dataset_ref.table(table_id)) - table._properties = resource - - return table - - def to_api_repr(self): - """Constructs the API resource of this table - - Returns: - Dict[str, object]: Table represented as an API resource - """ - return copy.deepcopy(self._properties) - - def to_bqstorage(self): - """Construct a BigQuery Storage API representation of this table. - - Returns: - google.cloud.bigquery_storage_v1beta1.types.TableReference: - A reference to this table in the BigQuery Storage API. - """ - return self.reference.to_bqstorage() - - def _build_resource(self, filter_fields): - """Generate a resource for ``update``.""" - return _helpers._build_resource_from_properties(self, filter_fields) - - def __repr__(self): - return "Table({})".format(repr(self.reference)) - - -class TableListItem(object): - """A read-only table resource from a list operation. - - For performance reasons, the BigQuery API only includes some of the table - properties when listing tables. Notably, - :attr:`~google.cloud.bigquery.table.Table.schema` and - :attr:`~google.cloud.bigquery.table.Table.num_rows` are missing. - - For a full list of the properties that the BigQuery API returns, see the - `REST documentation for tables.list - `_. - - - Args: - resource (Dict[str, object]): - A table-like resource object from a table list response. A - ``tableReference`` property is required. - - Raises: - ValueError: - If ``tableReference`` or one of its required members is missing - from ``resource``. - """ - - def __init__(self, resource): - if "tableReference" not in resource: - raise ValueError("resource must contain a tableReference value") - if "projectId" not in resource["tableReference"]: - raise ValueError( - "resource['tableReference'] must contain a projectId value" - ) - if "datasetId" not in resource["tableReference"]: - raise ValueError( - "resource['tableReference'] must contain a datasetId value" - ) - if "tableId" not in resource["tableReference"]: - raise ValueError("resource['tableReference'] must contain a tableId value") - - self._properties = resource - - @property - def created(self): - """Union[datetime.datetime, None]: Datetime at which the table was - created (:data:`None` until set from the server). - """ - creation_time = self._properties.get("creationTime") - if creation_time is not None: - # creation_time will be in milliseconds. 
- return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(creation_time) - ) - - @property - def expires(self): - """Union[datetime.datetime, None]: Datetime at which the table will be - deleted. - """ - expiration_time = self._properties.get("expirationTime") - if expiration_time is not None: - # expiration_time will be in milliseconds. - return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(expiration_time) - ) - - @property - def project(self): - """str: Project bound to the table.""" - return self._properties["tableReference"]["projectId"] - - @property - def dataset_id(self): - """str: ID of dataset containing the table.""" - return self._properties["tableReference"]["datasetId"] - - @property - def table_id(self): - """str: ID of the table.""" - return self._properties["tableReference"]["tableId"] - - reference = property(_reference_getter) - - @property - def labels(self): - """Dict[str, str]: Labels for the table. - - This method always returns a dict. To change a table's labels, - modify the dict, then call ``Client.update_table``. To delete a - label, set its value to :data:`None` before updating. - """ - return self._properties.setdefault("labels", {}) - - @property - def full_table_id(self): - """Union[str, None]: ID for the table (:data:`None` until set from the - server). - - In the format ``project_id:dataset_id.table_id``. - """ - return self._properties.get("id") - - @property - def table_type(self): - """Union[str, None]: The type of the table (:data:`None` until set from - the server). - - Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``. - """ - return self._properties.get("type") - - @property - def time_partitioning(self): - """google.cloud.bigquery.table.TimePartitioning: Configures time-based - partitioning for a table. - """ - prop = self._properties.get("timePartitioning") - if prop is not None: - return TimePartitioning.from_api_repr(prop) - - @property - def partitioning_type(self): - """Union[str, None]: Time partitioning of the table if it is - partitioned (Defaults to :data:`None`). - """ - warnings.warn( - "This method will be deprecated in future versions. Please use " - "TableListItem.time_partitioning.type_ instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is not None: - return self.time_partitioning.type_ - - @property - def partition_expiration(self): - """Union[int, None]: Expiration time in milliseconds for a partition. - - If this property is set and :attr:`type_` is not set, :attr:`type_` - will default to :attr:`TimePartitioningType.DAY`. - """ - warnings.warn( - "This method will be deprecated in future versions. Please use " - "TableListItem.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - if self.time_partitioning is not None: - return self.time_partitioning.expiration_ms - - @property - def friendly_name(self): - """Union[str, None]: Title of the table (defaults to :data:`None`).""" - return self._properties.get("friendlyName") - - view_use_legacy_sql = property(_view_use_legacy_sql_getter) - - @property - def clustering_fields(self): - """Union[List[str], None]: Fields defining clustering for the table - - (Defaults to :data:`None`). - - Clustering fields are immutable after table creation. - - .. note:: - - As of 2018-06-29, clustering fields cannot be set on a table - which does not also have time partitioning defined.
- """ - prop = self._properties.get("clustering") - if prop is not None: - return list(prop.get("fields", ())) - - @classmethod - def from_string(cls, full_table_id): - """Construct a table from fully-qualified table ID. - - Args: - full_table_id (str): - A fully-qualified table ID in standard SQL format. Must - included a project ID, dataset ID, and table ID, each - separated by ``.``. - - Returns: - Table: Table parsed from ``full_table_id``. - - Examples: - >>> Table.from_string('my-project.mydataset.mytable') - Table(TableRef...(D...('my-project', 'mydataset'), 'mytable')) - - Raises: - ValueError: - If ``full_table_id`` is not a fully-qualified table ID in - standard SQL format. - """ - return cls( - {"tableReference": TableReference.from_string(full_table_id).to_api_repr()} - ) - - def to_bqstorage(self): - """Construct a BigQuery Storage API representation of this table. - - Returns: - google.cloud.bigquery_storage_v1beta1.types.TableReference: - A reference to this table in the BigQuery Storage API. - """ - return self.reference.to_bqstorage() - - -def _row_from_mapping(mapping, schema): - """Convert a mapping to a row tuple using the schema. - - Args: - mapping (Dict[str, object]) - Mapping of row data: must contain keys for all required fields in - the schema. Keys which do not correspond to a field in the schema - are ignored. - schema (List[google.cloud.bigquery.schema.SchemaField]): - The schema of the table destination for the rows - - Returns: - Tuple[object]: - Tuple whose elements are ordered according to the schema. - - Raises: - ValueError: If schema is empty. - """ - if len(schema) == 0: - raise ValueError(_TABLE_HAS_NO_SCHEMA) - - row = [] - for field in schema: - if field.mode == "REQUIRED": - row.append(mapping[field.name]) - elif field.mode == "REPEATED": - row.append(mapping.get(field.name, ())) - elif field.mode == "NULLABLE": - row.append(mapping.get(field.name)) - else: - raise ValueError("Unknown field mode: {}".format(field.mode)) - return tuple(row) - - -class StreamingBuffer(object): - """Information about a table's streaming buffer. - - See https://cloud.google.com/bigquery/streaming-data-into-bigquery. - - Args: - resource (Dict[str, object]): - streaming buffer representation returned from the API - """ - - def __init__(self, resource): - self.estimated_bytes = int(resource["estimatedBytes"]) - self.estimated_rows = int(resource["estimatedRows"]) - # time is in milliseconds since the epoch. - self.oldest_entry_time = google.cloud._helpers._datetime_from_microseconds( - 1000.0 * int(resource["oldestEntryTime"]) - ) - - -class Row(object): - """A BigQuery row. - - Values can be accessed by position (index), by key like a dict, - or as properties. - - Args: - values (Sequence[object]): The row values - field_to_index (Dict[str, int]): - A mapping from schema field names to indexes - """ - - # Choose unusual field names to try to avoid conflict with schema fields. - __slots__ = ("_xxx_values", "_xxx_field_to_index") - - def __init__(self, values, field_to_index): - self._xxx_values = values - self._xxx_field_to_index = field_to_index - - def values(self): - """Return the values included in this row. - - Returns: - Sequence[object]: A sequence of length ``len(row)``. - """ - return copy.deepcopy(self._xxx_values) - - def keys(self): - """Return the keys for using a row as a dict. 
- - Returns: - Iterable[str]: The keys corresponding to the columns of a row - - Examples: - - >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).keys()) - ['x', 'y'] - """ - return six.iterkeys(self._xxx_field_to_index) - - def items(self): - """Return items as ``(key, value)`` pairs. - - Returns: - Iterable[Tuple[str, object]]: - The ``(key, value)`` pairs representing this row. - - Examples: - - >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).items()) - [('x', 'a'), ('y', 'b')] - """ - for key, index in six.iteritems(self._xxx_field_to_index): - yield (key, copy.deepcopy(self._xxx_values[index])) - - def get(self, key, default=None): - """Return a value for key, with a default value if it does not exist. - - Args: - key (str): The key of the column to access - default (object): - The default value to use if the key does not exist. (Defaults - to :data:`None`.) - - Returns: - object: - The value associated with the provided key, or a default value. - - Examples: - When the key exists, the value associated with it is returned. - - >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('x') - 'a' - - The default value is :data:`None` when the key does not exist. - - >>> print(Row(('a', 'b'), {'x': 0, 'y': 1}).get('z')) - None - - The default value can be overridden with the ``default`` parameter. - - >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', '') - '' - - >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', default = '') - '' - """ - index = self._xxx_field_to_index.get(key) - if index is None: - return default - return self._xxx_values[index] - - def __getattr__(self, name): - value = self._xxx_field_to_index.get(name) - if value is None: - raise AttributeError("no row field {!r}".format(name)) - return self._xxx_values[value] - - def __len__(self): - return len(self._xxx_values) - - def __getitem__(self, key): - if isinstance(key, six.string_types): - value = self._xxx_field_to_index.get(key) - if value is None: - raise KeyError("no row field {!r}".format(key)) - key = value - return self._xxx_values[key] - - def __eq__(self, other): - if not isinstance(other, Row): - return NotImplemented - return ( - self._xxx_values == other._xxx_values - and self._xxx_field_to_index == other._xxx_field_to_index - ) - - def __ne__(self, other): - return not self == other - - def __repr__(self): - # sort field dict by value, for determinism - items = sorted(self._xxx_field_to_index.items(), key=operator.itemgetter(1)) - f2i = "{" + ", ".join("%r: %d" % item for item in items) + "}" - return "Row({}, {})".format(self._xxx_values, f2i) - - -class _NoopProgressBarQueue(object): - """A fake Queue class that does nothing. - - This is used when there is no progress bar to send updates to. - """ - - def put_nowait(self, item): - """Don't actually do anything with the item.""" - - -class RowIterator(HTTPIterator): - """A class for iterating through HTTP/JSON API row list responses. - - Args: - client (google.cloud.bigquery.Client): The API client. - api_request (Callable[google.cloud._http.JSONConnection.api_request]): - The function to use to make API requests. - path (str): The method path to query for the list of items. - schema (Sequence[Union[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - Mapping[str, Any] \ - ]]): - The table's schema. If any item is a mapping, its content must be - compatible with - :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. - page_token (str): A token identifying a page in a result set to start - fetching results from.
- max_results (int, optional): The maximum number of results to fetch. - page_size (int, optional): The maximum number of rows in each page - of results from this request. Non-positive values are ignored. - Defaults to a sensible value set by the API. - extra_params (Dict[str, object]): - Extra query string parameters for the API call. - table (Union[ \ - google.cloud.bigquery.table.Table, \ - google.cloud.bigquery.table.TableReference, \ - ]): - Optional. The table which these rows belong to, or a reference to - it. Used to call the BigQuery Storage API to fetch rows. - selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): - Optional. A subset of columns to select from this table. - - """ - - def __init__( - self, - client, - api_request, - path, - schema, - page_token=None, - max_results=None, - page_size=None, - extra_params=None, - table=None, - selected_fields=None, - ): - super(RowIterator, self).__init__( - client, - api_request, - path, - item_to_value=_item_to_row, - items_key="rows", - page_token=page_token, - max_results=max_results, - extra_params=extra_params, - page_start=_rows_page_start, - next_token="pageToken", - ) - schema = _to_schema_fields(schema) - self._field_to_index = _helpers._field_to_index_mapping(schema) - self._page_size = page_size - self._preserve_order = False - self._project = client.project - self._schema = schema - self._selected_fields = selected_fields - self._table = table - self._total_rows = getattr(table, "num_rows", None) - - def _get_next_page_response(self): - """Requests the next page from the path provided. - - Returns: - Dict[str, object]: - The parsed JSON response of the next page's contents. - """ - params = self._get_query_params() - if self._page_size is not None: - params["maxResults"] = self._page_size - return self.api_request( - method=self._HTTP_METHOD, path=self.path, query_params=params - ) - - @property - def schema(self): - """List[google.cloud.bigquery.schema.SchemaField]: The subset of - columns to be read from the table.""" - return list(self._schema) - - @property - def total_rows(self): - """int: The total number of rows in the table.""" - return self._total_rows - - def _get_progress_bar(self, progress_bar_type): - """Construct a tqdm progress bar object, if tqdm is installed.""" - if tqdm is None: - if progress_bar_type is not None: - warnings.warn(_NO_TQDM_ERROR, UserWarning, stacklevel=3) - return None - - description = "Downloading" - unit = "rows" - - try: - if progress_bar_type == "tqdm": - return tqdm.tqdm(desc=description, total=self.total_rows, unit=unit) - elif progress_bar_type == "tqdm_notebook": - return tqdm.tqdm_notebook( - desc=description, total=self.total_rows, unit=unit - ) - elif progress_bar_type == "tqdm_gui": - return tqdm.tqdm_gui(desc=description, total=self.total_rows, unit=unit) - except (KeyError, TypeError): - # Protect ourselves from any tqdm errors. In case of - # unexpected tqdm behavior, just fall back to showing - # no progress bar. - warnings.warn(_NO_TQDM_ERROR, UserWarning, stacklevel=3) - return None - - def _to_page_iterable( - self, bqstorage_download, tabledata_list_download, bqstorage_client=None - ): - if bqstorage_client is not None: - try: - # Iterate over the stream so that read errors are raised (and - # the method can then fallback to tabledata.list). 
- for item in bqstorage_download(): - yield item - return - except google.api_core.exceptions.Forbidden: - # Don't hide errors such as insufficient permissions to create - # a read session, or the API is not enabled. Both of those are - # clearly problems if the developer has explicitly asked for - # BigQuery Storage API support. - raise - except google.api_core.exceptions.GoogleAPICallError: - # There is a known issue with reading from small anonymous - # query results tables, so some errors are expected. Rather - # than throw those errors, try reading the DataFrame again, but - # with the tabledata.list API. - pass - - _LOGGER.debug( - "Started reading table '{}.{}.{}' with tabledata.list.".format( - self._table.project, self._table.dataset_id, self._table.table_id - ) - ) - for item in tabledata_list_download(): - yield item - - def _to_arrow_iterable(self, bqstorage_client=None): - """Create an iterable of arrow RecordBatches, to process the table as a stream.""" - bqstorage_download = functools.partial( - _pandas_helpers.download_arrow_bqstorage, - self._project, - self._table, - bqstorage_client, - preserve_order=self._preserve_order, - selected_fields=self._selected_fields, - ) - tabledata_list_download = functools.partial( - _pandas_helpers.download_arrow_tabledata_list, iter(self.pages), self.schema - ) - return self._to_page_iterable( - bqstorage_download, - tabledata_list_download, - bqstorage_client=bqstorage_client, - ) - - # If changing the signature of this method, make sure to apply the same - # changes to job.QueryJob.to_arrow() - def to_arrow( - self, - progress_bar_type=None, - bqstorage_client=None, - create_bqstorage_client=False, - ): - """[Beta] Create a class:`pyarrow.Table` by loading all pages of a - table or query. - - Args: - progress_bar_type (Optional[str]): - If set, use the `tqdm <https://tqdm.github.io/>`_ library to - display a progress bar while the data downloads. Install the - ``tqdm`` package to use this feature. - - Possible values of ``progress_bar_type`` include: - - ``None`` - No progress bar. - ``'tqdm'`` - Use the :func:`tqdm.tqdm` function to print a progress bar - to :data:`sys.stderr`. - ``'tqdm_notebook'`` - Use the :func:`tqdm.tqdm_notebook` function to display a - progress bar as a Jupyter notebook widget. - ``'tqdm_gui'`` - Use the :func:`tqdm.tqdm_gui` function to display a - progress bar as a graphical dialog box. - bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): - **Beta Feature** Optional. A BigQuery Storage API client. If - supplied, use the faster BigQuery Storage API to fetch rows - from BigQuery. This API is a billable API. - - This method requires the ``pyarrow`` and - ``google-cloud-bigquery-storage`` libraries. - - Reading from a specific partition or snapshot is not - currently supported by this method. - create_bqstorage_client (bool): - **Beta Feature** Optional. If ``True``, create a BigQuery - Storage API client using the default API settings. The - BigQuery Storage API is a faster way to fetch rows from - BigQuery. See the ``bqstorage_client`` parameter for more - information. - - This argument does nothing if ``bqstorage_client`` is supplied. - - ..versionadded:: 1.24.0 - - Returns: - pyarrow.Table - A :class:`pyarrow.Table` populated with row data and column - headers from the query results. The column headers are derived - from the destination table's schema. - - Raises: - ValueError: If the :mod:`pyarrow` library cannot be imported.
- - ..versionadded:: 1.17.0 - """ - if pyarrow is None: - raise ValueError(_NO_PYARROW_ERROR) - - if ( - bqstorage_client or create_bqstorage_client - ) and self.max_results is not None: - warnings.warn( - "Cannot use bqstorage_client if max_results is set, " - "reverting to fetching data with the tabledata.list endpoint.", - stacklevel=2, - ) - create_bqstorage_client = False - bqstorage_client = None - - owns_bqstorage_client = False - if not bqstorage_client and create_bqstorage_client: - owns_bqstorage_client = True - bqstorage_client = self.client._create_bqstorage_client() - - try: - progress_bar = self._get_progress_bar(progress_bar_type) - - record_batches = [] - for record_batch in self._to_arrow_iterable( - bqstorage_client=bqstorage_client - ): - record_batches.append(record_batch) - - if progress_bar is not None: - # In some cases, the number of total rows is not populated - # until the first page of rows is fetched. Update the - # progress bar's total to keep an accurate count. - progress_bar.total = progress_bar.total or self.total_rows - progress_bar.update(record_batch.num_rows) - - if progress_bar is not None: - # Indicate that the download has finished. - progress_bar.close() - finally: - if owns_bqstorage_client: - bqstorage_client.transport.channel.close() - - if record_batches: - return pyarrow.Table.from_batches(record_batches) - else: - # No records, use schema based on BigQuery schema. - arrow_schema = _pandas_helpers.bq_to_arrow_schema(self._schema) - return pyarrow.Table.from_batches(record_batches, schema=arrow_schema) - - def to_dataframe_iterable(self, bqstorage_client=None, dtypes=None): - """Create an iterable of pandas DataFrames, to process the table as a stream. - - Args: - bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): - **Beta Feature** Optional. A BigQuery Storage API client. If - supplied, use the faster BigQuery Storage API to fetch rows - from BigQuery. - - This method requires the ``pyarrow`` and - ``google-cloud-bigquery-storage`` libraries. - - Reading from a specific partition or snapshot is not - currently supported by this method. - - **Caution**: There is a known issue reading small anonymous - query result tables with the BQ Storage API. When a problem - is encountered reading a table, the tabledata.list method - from the BigQuery API is used, instead. - dtypes (Map[str, Union[str, pandas.Series.dtype]]): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - - Returns: - pandas.DataFrame: - A generator of :class:`~pandas.DataFrame`. - - Raises: - ValueError: - If the :mod:`pandas` library cannot be imported. 
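Hypothetical end-to-end use of `to_arrow` as implemented above; this sketch assumes default GCP credentials, a billable project, and the pyarrow and tqdm extras installed, and uses the 1.x client API that this patch removes:

```python
from google.cloud import bigquery

client = bigquery.Client()  # picks up default credentials and project
rows = client.query("SELECT 1 AS x, 'a' AS y").result()  # a RowIterator

# Downloads all pages, optionally via the BigQuery Storage API,
# showing a tqdm progress bar on stderr.
arrow_table = rows.to_arrow(progress_bar_type="tqdm", create_bqstorage_client=True)
print(arrow_table.schema)
```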
- """ - if pandas is None: - raise ValueError(_NO_PANDAS_ERROR) - if dtypes is None: - dtypes = {} - - column_names = [field.name for field in self._schema] - bqstorage_download = functools.partial( - _pandas_helpers.download_dataframe_bqstorage, - self._project, - self._table, - bqstorage_client, - column_names, - dtypes, - preserve_order=self._preserve_order, - selected_fields=self._selected_fields, - ) - tabledata_list_download = functools.partial( - _pandas_helpers.download_dataframe_tabledata_list, - iter(self.pages), - self.schema, - dtypes, - ) - return self._to_page_iterable( - bqstorage_download, - tabledata_list_download, - bqstorage_client=bqstorage_client, - ) - - # If changing the signature of this method, make sure to apply the same - # changes to job.QueryJob.to_dataframe() - def to_dataframe( - self, - bqstorage_client=None, - dtypes=None, - progress_bar_type=None, - create_bqstorage_client=False, - ): - """Create a pandas DataFrame by loading all pages of a query. - - Args: - bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): - **Beta Feature** Optional. A BigQuery Storage API client. If - supplied, use the faster BigQuery Storage API to fetch rows - from BigQuery. - - This method requires the ``pyarrow`` and - ``google-cloud-bigquery-storage`` libraries. - - Reading from a specific partition or snapshot is not - currently supported by this method. - - **Caution**: There is a known issue reading small anonymous - query result tables with the BQ Storage API. When a problem - is encountered reading a table, the tabledata.list method - from the BigQuery API is used, instead. - dtypes (Map[str, Union[str, pandas.Series.dtype]]): - Optional. A dictionary of column names pandas ``dtype``s. The - provided ``dtype`` is used when constructing the series for - the column specified. Otherwise, the default pandas behavior - is used. - progress_bar_type (Optional[str]): - If set, use the `tqdm `_ library to - display a progress bar while the data downloads. Install the - ``tqdm`` package to use this feature. - - Possible values of ``progress_bar_type`` include: - - ``None`` - No progress bar. - ``'tqdm'`` - Use the :func:`tqdm.tqdm` function to print a progress bar - to :data:`sys.stderr`. - ``'tqdm_notebook'`` - Use the :func:`tqdm.tqdm_notebook` function to display a - progress bar as a Jupyter notebook widget. - ``'tqdm_gui'`` - Use the :func:`tqdm.tqdm_gui` function to display a - progress bar as a graphical dialog box. - - ..versionadded:: 1.11.0 - create_bqstorage_client (bool): - **Beta Feature** Optional. If ``True``, create a BigQuery - Storage API client using the default API settings. The - BigQuery Storage API is a faster way to fetch rows from - BigQuery. See the ``bqstorage_client`` parameter for more - information. - - This argument does nothing if ``bqstorage_client`` is supplied. - - ..versionadded:: 1.24.0 - - Returns: - pandas.DataFrame: - A :class:`~pandas.DataFrame` populated with row data and column - headers from the query results. The column headers are derived - from the destination table's schema. - - Raises: - ValueError: - If the :mod:`pandas` library cannot be imported, or the - :mod:`google.cloud.bigquery_storage_v1beta1` module is - required but cannot be imported. 
- - """ - if pandas is None: - raise ValueError(_NO_PANDAS_ERROR) - if dtypes is None: - dtypes = {} - - if ( - bqstorage_client or create_bqstorage_client - ) and self.max_results is not None: - warnings.warn( - "Cannot use bqstorage_client if max_results is set, " - "reverting to fetching data with the tabledata.list endpoint.", - stacklevel=2, - ) - create_bqstorage_client = False - bqstorage_client = None - - if pyarrow is not None: - # If pyarrow is available, calling to_arrow, then converting to a - # pandas dataframe is about 2x faster. This is because pandas.concat is - # rarely no-copy, whereas pyarrow.Table.from_batches + to_pandas is - # usually no-copy. - record_batch = self.to_arrow( - progress_bar_type=progress_bar_type, - bqstorage_client=bqstorage_client, - create_bqstorage_client=create_bqstorage_client, - ) - df = record_batch.to_pandas() - for column in dtypes: - df[column] = pandas.Series(df[column], dtype=dtypes[column]) - return df - - # The bqstorage_client is only used if pyarrow is available, so the - # rest of this method only needs to account for tabledata.list. - progress_bar = self._get_progress_bar(progress_bar_type) - - frames = [] - for frame in self.to_dataframe_iterable(dtypes=dtypes): - frames.append(frame) - - if progress_bar is not None: - # In some cases, the number of total rows is not populated - # until the first page of rows is fetched. Update the - # progress bar's total to keep an accurate count. - progress_bar.total = progress_bar.total or self.total_rows - progress_bar.update(len(frame)) - - if progress_bar is not None: - # Indicate that the download has finished. - progress_bar.close() - - # Avoid concatting an empty list. - if not frames: - column_names = [field.name for field in self._schema] - return pandas.DataFrame(columns=column_names) - return pandas.concat(frames, ignore_index=True) - - -class _EmptyRowIterator(object): - """An empty row iterator. - - This class prevents API requests when there are no rows to fetch or rows - are impossible to fetch, such as with query results for DDL CREATE VIEW - statements. - """ - - schema = () - pages = () - total_rows = 0 - - def to_arrow( - self, - progress_bar_type=None, - bqstorage_client=None, - create_bqstorage_client=False, - ): - """[Beta] Create an empty class:`pyarrow.Table`. - - Args: - progress_bar_type (Optional[str]): Ignored. Added for compatibility with RowIterator. - bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. - create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator. - - Returns: - pyarrow.Table: An empty :class:`pyarrow.Table`. - """ - if pyarrow is None: - raise ValueError(_NO_PYARROW_ERROR) - return pyarrow.Table.from_arrays(()) - - def to_dataframe( - self, - bqstorage_client=None, - dtypes=None, - progress_bar_type=None, - create_bqstorage_client=False, - ): - """Create an empty dataframe. - - Args: - bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. - dtypes (Any): Ignored. Added for compatibility with RowIterator. - progress_bar_type (Any): Ignored. Added for compatibility with RowIterator. - create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator. - - Returns: - pandas.DataFrame: An empty :class:`~pandas.DataFrame`. - """ - if pandas is None: - raise ValueError(_NO_PANDAS_ERROR) - return pandas.DataFrame() - - def __iter__(self): - return iter(()) - - -class PartitionRange(object): - """Definition of the ranges for range partitioning. - - .. note:: - **Beta**. 
-        **Beta**. The integer range partitioning feature is in a pre-release
-        state and might change or have limited support.
-
-    Args:
-        start (Optional[int]):
-            Sets the
-            :attr:`~google.cloud.bigquery.table.PartitionRange.start`
-            property.
-        end (Optional[int]):
-            Sets the
-            :attr:`~google.cloud.bigquery.table.PartitionRange.end`
-            property.
-        interval (Optional[int]):
-            Sets the
-            :attr:`~google.cloud.bigquery.table.PartitionRange.interval`
-            property.
-        _properties (Optional[dict]):
-            Private. Used to construct object from API resource.
-    """
-
-    def __init__(self, start=None, end=None, interval=None, _properties=None):
-        if _properties is None:
-            _properties = {}
-        self._properties = _properties
-
-        if start is not None:
-            self.start = start
-        if end is not None:
-            self.end = end
-        if interval is not None:
-            self.interval = interval
-
-    @property
-    def start(self):
-        """int: The start of range partitioning, inclusive."""
-        return _helpers._int_or_none(self._properties.get("start"))
-
-    @start.setter
-    def start(self, value):
-        self._properties["start"] = _helpers._str_or_none(value)
-
-    @property
-    def end(self):
-        """int: The end of range partitioning, exclusive."""
-        return _helpers._int_or_none(self._properties.get("end"))
-
-    @end.setter
-    def end(self, value):
-        self._properties["end"] = _helpers._str_or_none(value)
-
-    @property
-    def interval(self):
-        """int: The width of each interval."""
-        return _helpers._int_or_none(self._properties.get("interval"))
-
-    @interval.setter
-    def interval(self, value):
-        self._properties["interval"] = _helpers._str_or_none(value)
-
-    def _key(self):
-        return tuple(sorted(self._properties.items()))
-
-    def __repr__(self):
-        key_vals = ["{}={}".format(key, val) for key, val in self._key()]
-        return "PartitionRange({})".format(", ".join(key_vals))
-
-
-class RangePartitioning(object):
-    """Range-based partitioning configuration for a table.
-
-    .. note::
-        **Beta**. The integer range partitioning feature is in a pre-release
-        state and might change or have limited support.
-
-    Args:
-        range_ (Optional[google.cloud.bigquery.table.PartitionRange]):
-            Sets the
-            :attr:`google.cloud.bigquery.table.RangePartitioning.range_`
-            property.
-        field (Optional[str]):
-            Sets the
-            :attr:`google.cloud.bigquery.table.RangePartitioning.field`
-            property.
-        _properties (Optional[dict]):
-            Private. Used to construct object from API resource.
-    """
-
-    def __init__(self, range_=None, field=None, _properties=None):
-        if _properties is None:
-            _properties = {}
-        self._properties = _properties
-
-        if range_ is not None:
-            self.range_ = range_
-        if field is not None:
-            self.field = field
-
-    # Trailing underscore to prevent conflict with built-in range() function.
-    @property
-    def range_(self):
-        """google.cloud.bigquery.table.PartitionRange: Defines the
-        ranges for range partitioning.
-
-        Raises:
-            ValueError:
-                If the value is not a :class:`PartitionRange`.
-        """
-        range_properties = self._properties.setdefault("range", {})
-        return PartitionRange(_properties=range_properties)
-
-    @range_.setter
-    def range_(self, value):
-        if not isinstance(value, PartitionRange):
-            raise ValueError("Expected a PartitionRange, but got {}.".format(value))
-        self._properties["range"] = value._properties
-
-    @property
-    def field(self):
-        """str: The table is partitioned by this field.
-
-        The field must be a top-level ``NULLABLE`` / ``REQUIRED`` field. The
-        only supported type is ``INTEGER`` / ``INT64``.
-        """
-        return self._properties.get("field")
-
-    @field.setter
-    def field(self, value):
-        self._properties["field"] = value
-
-    def _key(self):
-        return (("field", self.field), ("range_", self.range_))
-
-    def __repr__(self):
-        key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()]
-        return "RangePartitioning({})".format(", ".join(key_vals))
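A short sketch of how the `PartitionRange` and `RangePartitioning` classes removed above compose when creating an integer-range-partitioned table (project, dataset, table, and field names are placeholders; assumes application default credentials):

```python
from google.cloud import bigquery

client = bigquery.Client()
table = bigquery.Table(
    "my-project.my_dataset.my_range_partitioned_table",  # placeholder ID
    schema=[bigquery.SchemaField("zipcode", "INTEGER")],
)
# Partitions cover [0, 100000) in steps of 10; rows whose zipcode falls
# outside the range land in BigQuery's UNPARTITIONED partition.
table.range_partitioning = bigquery.RangePartitioning(
    field="zipcode",
    range_=bigquery.PartitionRange(start=0, end=100000, interval=10),
)
table = client.create_table(table)
print(table.range_partitioning)
```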
- """ - return self._properties.get("field") - - @field.setter - def field(self, value): - self._properties["field"] = value - - def _key(self): - return (("field", self.field), ("range_", self.range_)) - - def __repr__(self): - key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()] - return "RangePartitioning({})".format(", ".join(key_vals)) - - -class TimePartitioningType(object): - """Specifies the type of time partitioning to perform.""" - - DAY = "DAY" - """str: Generates one partition per day.""" - - -class TimePartitioning(object): - """Configures time-based partitioning for a table. - - Args: - type_ (google.cloud.bigquery.table.TimePartitioningType, optional): - Specifies the type of time partitioning to perform. Defaults to - :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`, - which is the only currently supported type. - field (str, optional): - If set, the table is partitioned by this field. If not set, the - table is partitioned by pseudo column ``_PARTITIONTIME``. The field - must be a top-level ``TIMESTAMP`` or ``DATE`` field. Its mode must - be ``NULLABLE`` or ``REQUIRED``. - expiration_ms(int, optional): - Number of milliseconds for which to keep the storage for a - partition. - require_partition_filter (bool, optional): - DEPRECATED: Use - :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, - instead. - """ - - def __init__( - self, type_=None, field=None, expiration_ms=None, require_partition_filter=None - ): - self._properties = {} - if type_ is None: - self.type_ = TimePartitioningType.DAY - else: - self.type_ = type_ - if field is not None: - self.field = field - if expiration_ms is not None: - self.expiration_ms = expiration_ms - if require_partition_filter is not None: - self.require_partition_filter = require_partition_filter - - @property - def type_(self): - """google.cloud.bigquery.table.TimePartitioningType: The type of time - partitioning to use. - """ - return self._properties.get("type") - - @type_.setter - def type_(self, value): - self._properties["type"] = value - - @property - def field(self): - """str: Field in the table to use for partitioning""" - return self._properties.get("field") - - @field.setter - def field(self, value): - self._properties["field"] = value - - @property - def expiration_ms(self): - """int: Number of milliseconds to keep the storage for a partition.""" - return _helpers._int_or_none(self._properties.get("expirationMs")) - - @expiration_ms.setter - def expiration_ms(self, value): - if value is not None: - # Allow explicitly setting the expiration to None. - value = str(value) - self._properties["expirationMs"] = value - - @property - def require_partition_filter(self): - """bool: Specifies whether partition filters are required for queries - - DEPRECATED: Use - :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, - instead. - """ - warnings.warn( - ( - "TimePartitioning.require_partition_filter will be removed in " - "future versions. Please use Table.require_partition_filter " - "instead." - ), - PendingDeprecationWarning, - stacklevel=2, - ) - return self._properties.get("requirePartitionFilter") - - @require_partition_filter.setter - def require_partition_filter(self, value): - warnings.warn( - ( - "TimePartitioning.require_partition_filter will be removed in " - "future versions. Please use Table.require_partition_filter " - "instead." 
-            ),
-            PendingDeprecationWarning,
-            stacklevel=2,
-        )
-        self._properties["requirePartitionFilter"] = value
-
-    @classmethod
-    def from_api_repr(cls, api_repr):
-        """Return a :class:`TimePartitioning` object deserialized from a dict.
-
-        This method creates a new ``TimePartitioning`` instance that points to
-        the ``api_repr`` parameter as its internal properties dict. This means
-        that when a ``TimePartitioning`` instance is stored as a property of
-        another object, any changes made at the higher level will also appear
-        here::
-
-            >>> time_partitioning = TimePartitioning()
-            >>> table.time_partitioning = time_partitioning
-            >>> table.time_partitioning.field = 'timecolumn'
-            >>> time_partitioning.field
-            'timecolumn'
-
-        Args:
-            api_repr (Mapping[str, str]):
-                The serialized representation of the TimePartitioning, such as
-                what is output by :meth:`to_api_repr`.
-
-        Returns:
-            google.cloud.bigquery.table.TimePartitioning:
-                The ``TimePartitioning`` object.
-        """
-        instance = cls()
-        instance._properties = api_repr
-        return instance
-
-    def to_api_repr(self):
-        """Return a dictionary representing this object.
-
-        This method returns the properties dict of the ``TimePartitioning``
-        instance rather than making a copy. This means that when a
-        ``TimePartitioning`` instance is stored as a property of another
-        object, any changes made at the higher level will also appear here.
-
-        Returns:
-            dict:
-                A dictionary representing the TimePartitioning object in
-                serialized form.
-        """
-        return self._properties
-
-    def _key(self):
-        return tuple(sorted(self._properties.items()))
-
-    def __eq__(self, other):
-        if not isinstance(other, TimePartitioning):
-            return NotImplemented
-        return self._key() == other._key()
-
-    def __ne__(self, other):
-        return not self == other
-
-    def __hash__(self):
-        return hash(self._key())
-
-    def __repr__(self):
-        key_vals = ["{}={}".format(key, val) for key, val in self._key()]
-        return "TimePartitioning({})".format(",".join(key_vals))
-
-
-def _item_to_row(iterator, resource):
-    """Convert a JSON row to the native object.
-
-    .. note::
-
-        This assumes that the ``schema`` attribute has been
-        added to the iterator after being created, which
-        should be done by the caller.
-
-    Args:
-        iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use.
-        resource (Dict): An item to be converted to a row.
-
-    Returns:
-        google.cloud.bigquery.table.Row: The next row in the page.
-    """
-    return Row(
-        _helpers._row_tuple_from_json(resource, iterator.schema),
-        iterator._field_to_index,
-    )
-
-
-def _tabledata_list_page_columns(schema, response):
-    """Make a generator of all the columns in a page from tabledata.list.
-
-    This enables creating a :class:`pandas.DataFrame` and other
-    column-oriented data structures such as :class:`pyarrow.RecordBatch`.
-    """
-    columns = []
-    rows = response.get("rows", [])
-
-    def get_column_data(field_index, field):
-        for row in rows:
-            yield _helpers._field_from_json(row["f"][field_index]["v"], field)
-
-    for field_index, field in enumerate(schema):
-        columns.append(get_column_data(field_index, field))
-
-    return columns
-
-
-# pylint: disable=unused-argument
-def _rows_page_start(iterator, page, response):
-    """Grab total rows when :class:`~google.api_core.page_iterator.Page` starts.
-
-    Args:
-        iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use.
-        page (google.api_core.page_iterator.Page): The page that was just created.
-        response (Dict): The JSON API response for a page of rows in a table.
-    """
-    # Make a (lazy) copy of the page in column-oriented format for use in data
-    # science packages.
-    page._columns = _tabledata_list_page_columns(iterator._schema, response)
-
-    total_rows = response.get("totalRows")
-    if total_rows is not None:
-        total_rows = int(total_rows)
-    iterator._total_rows = total_rows
-
-
-# pylint: enable=unused-argument
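A sketch of the `TimePartitioning` surface removed above, including the string conversion applied by the `expiration_ms` setter and the fact that `to_api_repr()` hands back the live properties dict rather than a copy (the field name is a placeholder):

```python
from google.cloud.bigquery.table import TimePartitioning, TimePartitioningType

ninety_days_ms = 90 * 24 * 60 * 60 * 1000
partitioning = TimePartitioning(
    type_=TimePartitioningType.DAY,
    field="transaction_date",  # placeholder column name
    expiration_ms=ninety_days_ms,
)

# expiration_ms is stored as a string in the underlying API resource.
assert partitioning.to_api_repr() == {
    "type": "DAY",
    "field": "transaction_date",
    "expirationMs": str(ninety_days_ms),
}
```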
-
-
-def _table_arg_to_table_ref(value, default_project=None):
-    """Helper to convert a string or Table to TableReference.
-
-    This function keeps TableReference and other kinds of objects unchanged.
-    """
-    if isinstance(value, six.string_types):
-        value = TableReference.from_string(value, default_project=default_project)
-    if isinstance(value, (Table, TableListItem)):
-        value = value.reference
-    return value
-
-
-def _table_arg_to_table(value, default_project=None):
-    """Helper to convert a string or TableReference to a Table.
-
-    This function keeps Table and other kinds of objects unchanged.
-    """
-    if isinstance(value, six.string_types):
-        value = TableReference.from_string(value, default_project=default_project)
-    if isinstance(value, TableReference):
-        value = Table(value)
-    if isinstance(value, TableListItem):
-        newvalue = Table(value.reference)
-        newvalue._properties = value._properties
-        value = newvalue
-
-    return value
diff --git a/bigquery/google/cloud/bigquery_v2/__init__.py b/bigquery/google/cloud/bigquery_v2/__init__.py
deleted file mode 100644
index e582214329f3..000000000000
--- a/bigquery/google/cloud/bigquery_v2/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import pkg_resources
-
-__version__ = pkg_resources.get_distribution("google-cloud-bigquery").version  # noqa
-
-from google.cloud.bigquery_v2 import types
-from google.cloud.bigquery_v2.gapic import enums
-
-
-__all__ = (
-    # google.cloud.bigquery_v2
-    "__version__",
-    "types",
-    # google.cloud.bigquery_v2
-    "enums",
-)
diff --git a/bigquery/google/cloud/bigquery_v2/gapic/__init__.py b/bigquery/google/cloud/bigquery_v2/gapic/__init__.py
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/bigquery/google/cloud/bigquery_v2/gapic/enums.py b/bigquery/google/cloud/bigquery_v2/gapic/enums.py
deleted file mode 100644
index 97059414f368..000000000000
--- a/bigquery/google/cloud/bigquery_v2/gapic/enums.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class Model(object):
-    class DataSplitMethod(enum.IntEnum):
-        """
-        Indicates the method to split input data into multiple tables.
-
-        Attributes:
-          DATA_SPLIT_METHOD_UNSPECIFIED (int)
-          RANDOM (int): Splits data randomly.
-          CUSTOM (int): Splits data with the user provided tags.
-          SEQUENTIAL (int): Splits data sequentially.
-          NO_SPLIT (int): Data split will be skipped.
-          AUTO_SPLIT (int): Splits data automatically: Uses NO\_SPLIT if the data size is small.
-          Otherwise uses RANDOM.
-        """
-
-        DATA_SPLIT_METHOD_UNSPECIFIED = 0
-        RANDOM = 1
-        CUSTOM = 2
-        SEQUENTIAL = 3
-        NO_SPLIT = 4
-        AUTO_SPLIT = 5
-
-    class DistanceType(enum.IntEnum):
-        """
-        Distance metric used to compute the distance between two points.
-
-        Attributes:
-          DISTANCE_TYPE_UNSPECIFIED (int)
-          EUCLIDEAN (int): Euclidean distance.
-          COSINE (int): Cosine distance.
-        """
-
-        DISTANCE_TYPE_UNSPECIFIED = 0
-        EUCLIDEAN = 1
-        COSINE = 2
-
-    class LearnRateStrategy(enum.IntEnum):
-        """
-        Indicates the learning rate optimization strategy to use.
-
-        Attributes:
-          LEARN_RATE_STRATEGY_UNSPECIFIED (int)
-          LINE_SEARCH (int): Use line search to determine learning rate.
-          CONSTANT (int): Use a constant learning rate.
-        """
-
-        LEARN_RATE_STRATEGY_UNSPECIFIED = 0
-        LINE_SEARCH = 1
-        CONSTANT = 2
-
-    class LossType(enum.IntEnum):
-        """
-        Loss metric to evaluate model training performance.
-
-        Attributes:
-          LOSS_TYPE_UNSPECIFIED (int)
-          MEAN_SQUARED_LOSS (int): Mean squared loss, used for linear regression.
-          MEAN_LOG_LOSS (int): Mean log loss, used for logistic regression.
-        """
-
-        LOSS_TYPE_UNSPECIFIED = 0
-        MEAN_SQUARED_LOSS = 1
-        MEAN_LOG_LOSS = 2
-
-    class ModelType(enum.IntEnum):
-        """
-        Indicates the type of the Model.
-
-        Attributes:
-          MODEL_TYPE_UNSPECIFIED (int)
-          LINEAR_REGRESSION (int): Linear regression model.
-          LOGISTIC_REGRESSION (int): Logistic regression based classification model.
-          KMEANS (int): K-means clustering model.
-          TENSORFLOW (int): [Beta] An imported TensorFlow model.
-        """
-
-        MODEL_TYPE_UNSPECIFIED = 0
-        LINEAR_REGRESSION = 1
-        LOGISTIC_REGRESSION = 2
-        KMEANS = 3
-        TENSORFLOW = 6
-
-    class OptimizationStrategy(enum.IntEnum):
-        """
-        Indicates the optimization strategy used for training.
-
-        Attributes:
-          OPTIMIZATION_STRATEGY_UNSPECIFIED (int)
-          BATCH_GRADIENT_DESCENT (int): Uses an iterative batch gradient descent algorithm.
-          NORMAL_EQUATION (int): Uses a normal equation to solve linear regression problem.
-        """
-
-        OPTIMIZATION_STRATEGY_UNSPECIFIED = 0
-        BATCH_GRADIENT_DESCENT = 1
-        NORMAL_EQUATION = 2
-
-    class KmeansEnums(object):
-        class KmeansInitializationMethod(enum.IntEnum):
-            """
-            Indicates the method used to initialize the centroids for KMeans
-            clustering algorithm.
-
-            Attributes:
-              KMEANS_INITIALIZATION_METHOD_UNSPECIFIED (int)
-              RANDOM (int): Initializes the centroids randomly.
-              CUSTOM (int): Initializes the centroids using data specified in
-              kmeans\_initialization\_column.
-            """
-
-            KMEANS_INITIALIZATION_METHOD_UNSPECIFIED = 0
-            RANDOM = 1
-            CUSTOM = 2
-
-
-class StandardSqlDataType(object):
-    class TypeKind(enum.IntEnum):
-        """
-        Attributes:
-          TYPE_KIND_UNSPECIFIED (int): Invalid type.
-          INT64 (int): Encoded as a string in decimal format.
-          BOOL (int): Encoded as a boolean "false" or "true".
-          FLOAT64 (int): Encoded as a number, or string "NaN", "Infinity" or "-Infinity".
-          STRING (int): Encoded as a string value.
- BYTES (int): Encoded as a base64 string per RFC 4648, section 4. - TIMESTAMP (int): Encoded as an RFC 3339 timestamp with mandatory "Z" time zone string: - 1985-04-12T23:20:50.52Z - DATE (int): Encoded as RFC 3339 full-date format string: 1985-04-12 - TIME (int): Encoded as RFC 3339 partial-time format string: 23:20:50.52 - DATETIME (int): Encoded as RFC 3339 full-date "T" partial-time: 1985-04-12T23:20:50.52 - GEOGRAPHY (int): Encoded as WKT - NUMERIC (int): Encoded as a decimal string. - ARRAY (int): Encoded as a list with types matching Type.array\_type. - STRUCT (int): Encoded as a list with fields of type Type.struct\_type[i]. List is used - because a JSON object cannot have duplicate field names. - """ - - TYPE_KIND_UNSPECIFIED = 0 - INT64 = 2 - BOOL = 5 - FLOAT64 = 7 - STRING = 8 - BYTES = 9 - TIMESTAMP = 19 - DATE = 10 - TIME = 20 - DATETIME = 21 - GEOGRAPHY = 22 - NUMERIC = 23 - ARRAY = 16 - STRUCT = 17 diff --git a/bigquery/google/cloud/bigquery_v2/proto/__init__.py b/bigquery/google/cloud/bigquery_v2/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto b/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto deleted file mode 100644 index 54445f0fa770..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.v2; - -import "google/api/field_behavior.proto"; -import "google/protobuf/wrappers.proto"; -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; -option java_outer_classname = "EncryptionConfigProto"; -option java_package = "com.google.cloud.bigquery.v2"; - -message EncryptionConfiguration { - // Optional. Describes the Cloud KMS encryption key that will be used to - // protect destination BigQuery table. The BigQuery Service Account associated - // with your project requires access to this encryption key. - google.protobuf.StringValue kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py deleted file mode 100644 index f7b26be5547f..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
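The enum wrappers removed above are plain `IntEnum` subclasses, so they can be exercised without any API calls; a sketch using values shown in the deleted module (assumes a pre-removal google-cloud-bigquery installation):

```python
from google.cloud.bigquery_v2.gapic import enums

# The wrapper values mirror the proto enum numbers, including the gap at
# TENSORFLOW = 6 in ModelType.
assert enums.Model.ModelType.KMEANS == 3
assert enums.Model.DataSplitMethod.AUTO_SPLIT == 5
assert enums.StandardSqlDataType.TypeKind.INT64 == 2

# IntEnum members compare equal to the raw integers returned by the API.
print(enums.Model.ModelType(6))  # ModelType.TENSORFLOW
```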
-# source: google/cloud/bigquery_v2/proto/encryption_config.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_v2/proto/encryption_config.proto", - package="google.cloud.bigquery.v2", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.bigquery.v2B\025EncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" - ), - serialized_pb=_b( - '\n6google/cloud/bigquery_v2/proto/encryption_config.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"R\n\x17\x45ncryptionConfiguration\x12\x37\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x01\x42w\n\x1c\x63om.google.cloud.bigquery.v2B\x15\x45ncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_ENCRYPTIONCONFIGURATION = _descriptor.Descriptor( - name="EncryptionConfiguration", - full_name="google.cloud.bigquery.v2.EncryptionConfiguration", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="kms_key_name", - full_name="google.cloud.bigquery.v2.EncryptionConfiguration.kms_key_name", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=179, - serialized_end=261, -) - -_ENCRYPTIONCONFIGURATION.fields_by_name[ - "kms_key_name" -].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE -DESCRIPTOR.message_types_by_name["EncryptionConfiguration"] = _ENCRYPTIONCONFIGURATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EncryptionConfiguration = _reflection.GeneratedProtocolMessageType( - "EncryptionConfiguration", - (_message.Message,), - dict( - DESCRIPTOR=_ENCRYPTIONCONFIGURATION, - __module__="google.cloud.bigquery_v2.proto.encryption_config_pb2", - __doc__="""Encryption configuration. - - Attributes: - kms_key_name: - Optional. Describes the Cloud KMS encryption key that will be - used to protect destination BigQuery table. The BigQuery - Service Account associated with your project requires access - to this encryption key. 
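The `EncryptionConfiguration` message above is the proto counterpart of the handwritten client's `encryption_configuration.py`; a sketch of attaching a Cloud KMS key to a new table through that surface (all resource names are placeholders, and the BigQuery service account is assumed to have permission to use the key):

```python
from google.cloud import bigquery

client = bigquery.Client()
table = bigquery.Table("my-project.my_dataset.my_cmek_table")  # placeholder ID
table.encryption_configuration = bigquery.EncryptionConfiguration(
    # Placeholder resource name of a Cloud KMS key.
    kms_key_name="projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"
)
table = client.create_table(table)
print(table.encryption_configuration.kms_key_name)
```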
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.EncryptionConfiguration) - ), -) -_sym_db.RegisterMessage(EncryptionConfiguration) - - -DESCRIPTOR._options = None -_ENCRYPTIONCONFIGURATION.fields_by_name["kms_key_name"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery/google/cloud/bigquery_v2/proto/location_metadata.proto b/bigquery/google/cloud/bigquery_v2/proto/location_metadata.proto deleted file mode 100644 index 95a3133c5755..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/location_metadata.proto +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.v2; - -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; -option java_outer_classname = "LocationMetadataProto"; -option java_package = "com.google.cloud.bigquery.v2"; - - -// BigQuery-specific metadata about a location. This will be set on -// google.cloud.location.Location.metadata in Cloud Location API -// responses. -message LocationMetadata { - // The legacy BigQuery location ID, e.g. “EU” for the “europe” location. - // This is for any API consumers that need the legacy “US” and “EU” locations. - string legacy_location_id = 1; -} diff --git a/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2.py deleted file mode 100644 index 6dd9da52e4ed..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2.py +++ /dev/null @@ -1,98 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/bigquery_v2/proto/location_metadata.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_v2/proto/location_metadata.proto", - package="google.cloud.bigquery.v2", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.bigquery.v2B\025LocationMetadataProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" - ), - serialized_pb=_b( - '\n6google/cloud/bigquery_v2/proto/location_metadata.proto\x12\x18google.cloud.bigquery.v2\x1a\x1cgoogle/api/annotations.proto".\n\x10LocationMetadata\x12\x1a\n\x12legacy_location_id\x18\x01 \x01(\tBw\n\x1c\x63om.google.cloud.bigquery.v2B\x15LocationMetadataProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - - -_LOCATIONMETADATA = _descriptor.Descriptor( - name="LocationMetadata", - full_name="google.cloud.bigquery.v2.LocationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="legacy_location_id", - full_name="google.cloud.bigquery.v2.LocationMetadata.legacy_location_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=114, - serialized_end=160, -) - -DESCRIPTOR.message_types_by_name["LocationMetadata"] = _LOCATIONMETADATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LocationMetadata = _reflection.GeneratedProtocolMessageType( - "LocationMetadata", - (_message.Message,), - dict( - DESCRIPTOR=_LOCATIONMETADATA, - __module__="google.cloud.bigquery_v2.proto.location_metadata_pb2", - __doc__="""BigQuery-specific metadata about a location. This will be set on - google.cloud.location.Location.metadata in Cloud Location API responses. - - - Attributes: - legacy_location_id: - The legacy BigQuery location ID, e.g. ``EU`` for the ``europe`` - location. This is for any API consumers that need the legacy - ``US`` and ``EU`` locations. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.LocationMetadata) - ), -) -_sym_db.RegisterMessage(LocationMetadata) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/location_metadata_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc diff --git a/bigquery/google/cloud/bigquery_v2/proto/model.proto b/bigquery/google/cloud/bigquery_v2/proto/model.proto deleted file mode 100644 index 13d980774413..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/model.proto +++ /dev/null @@ -1,640 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.v2; - -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/cloud/bigquery/v2/encryption_config.proto"; -import "google/cloud/bigquery/v2/model_reference.proto"; -import "google/cloud/bigquery/v2/standard_sql.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; -option java_outer_classname = "ModelProto"; -option java_package = "com.google.cloud.bigquery.v2"; - -service ModelService { - option (google.api.default_host) = "bigquery.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/bigquery.readonly," - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only"; - - // Gets the specified model resource by model ID. - rpc GetModel(GetModelRequest) returns (Model) { - option (google.api.method_signature) = "project_id,dataset_id,model_id"; - } - - // Lists all models in the specified dataset. Requires the READER dataset - // role. - rpc ListModels(ListModelsRequest) returns (ListModelsResponse) { - option (google.api.method_signature) = "project_id,dataset_id,max_results"; - } - - // Patch specific fields in the specified model. - rpc PatchModel(PatchModelRequest) returns (Model) { - option (google.api.method_signature) = "project_id,dataset_id,model_id,model"; - } - - // Deletes the model specified by modelId from the dataset. - rpc DeleteModel(DeleteModelRequest) returns (google.protobuf.Empty) { - option (google.api.method_signature) = "project_id,dataset_id,model_id"; - } -} - -message Model { - message KmeansEnums { - // Indicates the method used to initialize the centroids for KMeans - // clustering algorithm. - enum KmeansInitializationMethod { - KMEANS_INITIALIZATION_METHOD_UNSPECIFIED = 0; - - // Initializes the centroids randomly. - RANDOM = 1; - - // Initializes the centroids using data specified in - // kmeans_initialization_column. - CUSTOM = 2; - } - - - } - - // Evaluation metrics for regression and explicit feedback type matrix - // factorization models. - message RegressionMetrics { - // Mean absolute error. - google.protobuf.DoubleValue mean_absolute_error = 1; - - // Mean squared error. - google.protobuf.DoubleValue mean_squared_error = 2; - - // Mean squared log error. - google.protobuf.DoubleValue mean_squared_log_error = 3; - - // Median absolute error. 
-    google.protobuf.DoubleValue median_absolute_error = 4;
-
-    // R^2 score.
-    google.protobuf.DoubleValue r_squared = 5;
-  }
-
-  // Aggregate metrics for classification/classifier models. For multi-class
-  // models, the metrics are either macro-averaged or micro-averaged. When
-  // macro-averaged, the metrics are calculated for each label and then an
-  // unweighted average is taken of those values. When micro-averaged, the
-  // metric is calculated globally by counting the total number of correctly
-  // predicted rows.
-  message AggregateClassificationMetrics {
-    // Precision is the fraction of actual positive predictions that had
-    // positive actual labels. For multiclass this is a macro-averaged
-    // metric treating each class as a binary classifier.
-    google.protobuf.DoubleValue precision = 1;
-
-    // Recall is the fraction of actual positive labels that were given a
-    // positive prediction. For multiclass this is a macro-averaged metric.
-    google.protobuf.DoubleValue recall = 2;
-
-    // Accuracy is the fraction of predictions given the correct label. For
-    // multiclass this is a micro-averaged metric.
-    google.protobuf.DoubleValue accuracy = 3;
-
-    // Threshold at which the metrics are computed. For binary
-    // classification models this is the positive class threshold.
-    // For multi-class classification models this is the confidence
-    // threshold.
-    google.protobuf.DoubleValue threshold = 4;
-
-    // The F1 score is an average of recall and precision. For multiclass
-    // this is a macro-averaged metric.
-    google.protobuf.DoubleValue f1_score = 5;
-
-    // Logarithmic Loss. For multiclass this is a macro-averaged metric.
-    google.protobuf.DoubleValue log_loss = 6;
-
-    // Area Under a ROC Curve. For multiclass this is a macro-averaged
-    // metric.
-    google.protobuf.DoubleValue roc_auc = 7;
-  }
-
-  // Evaluation metrics for binary classification/classifier models.
-  message BinaryClassificationMetrics {
-    // Confusion matrix for binary classification models.
-    message BinaryConfusionMatrix {
-      // Threshold value used when computing each of the following metrics.
-      google.protobuf.DoubleValue positive_class_threshold = 1;
-
-      // Number of true samples predicted as true.
-      google.protobuf.Int64Value true_positives = 2;
-
-      // Number of false samples predicted as true.
-      google.protobuf.Int64Value false_positives = 3;
-
-      // Number of true samples predicted as false.
-      google.protobuf.Int64Value true_negatives = 4;
-
-      // Number of false samples predicted as false.
-      google.protobuf.Int64Value false_negatives = 5;
-
-      // The fraction of actual positive predictions that had positive actual
-      // labels.
-      google.protobuf.DoubleValue precision = 6;
-
-      // The fraction of actual positive labels that were given a positive
-      // prediction.
-      google.protobuf.DoubleValue recall = 7;
-
-      // The equally weighted average of recall and precision.
-      google.protobuf.DoubleValue f1_score = 8;
-
-      // The fraction of predictions given the correct label.
-      google.protobuf.DoubleValue accuracy = 9;
-    }
-
-    // Aggregate classification metrics.
-    AggregateClassificationMetrics aggregate_classification_metrics = 1;
-
-    // Binary confusion matrix at multiple thresholds.
-    repeated BinaryConfusionMatrix binary_confusion_matrix_list = 2;
-
-    // Label representing the positive class.
-    string positive_label = 3;
-
-    // Label representing the negative class.
-    string negative_label = 4;
-  }
-
-  // Evaluation metrics for multi-class classification/classifier models.
- message MultiClassClassificationMetrics { - // Confusion matrix for multi-class classification models. - message ConfusionMatrix { - // A single entry in the confusion matrix. - message Entry { - // The predicted label. For confidence_threshold > 0, we will - // also add an entry indicating the number of items under the - // confidence threshold. - string predicted_label = 1; - - // Number of items being predicted as this label. - google.protobuf.Int64Value item_count = 2; - } - - // A single row in the confusion matrix. - message Row { - // The original label of this row. - string actual_label = 1; - - // Info describing predicted label distribution. - repeated Entry entries = 2; - } - - // Confidence threshold used when computing the entries of the - // confusion matrix. - google.protobuf.DoubleValue confidence_threshold = 1; - - // One row per actual label. - repeated Row rows = 2; - } - - // Aggregate classification metrics. - AggregateClassificationMetrics aggregate_classification_metrics = 1; - - // Confusion matrix at different thresholds. - repeated ConfusionMatrix confusion_matrix_list = 2; - } - - // Evaluation metrics for clustering models. - message ClusteringMetrics { - // Message containing the information about one cluster. - message Cluster { - // Representative value of a single feature within the cluster. - message FeatureValue { - // Representative value of a categorical feature. - message CategoricalValue { - // Represents the count of a single category within the cluster. - message CategoryCount { - // The name of category. - string category = 1; - - // The count of training samples matching the category within the - // cluster. - google.protobuf.Int64Value count = 2; - } - - // Counts of all categories for the categorical feature. If there are - // more than ten categories, we return top ten (by count) and return - // one more CategoryCount with category "_OTHER_" and count as - // aggregate counts of remaining categories. - repeated CategoryCount category_counts = 1; - } - - // The feature column name. - string feature_column = 1; - - oneof value { - // The numerical feature value. This is the centroid value for this - // feature. - google.protobuf.DoubleValue numerical_value = 2; - - // The categorical feature value. - CategoricalValue categorical_value = 3; - } - } - - // Centroid id. - int64 centroid_id = 1; - - // Values of highly variant features for this cluster. - repeated FeatureValue feature_values = 2; - - // Count of training data rows that were assigned to this cluster. - google.protobuf.Int64Value count = 3; - } - - // Davies-Bouldin index. - google.protobuf.DoubleValue davies_bouldin_index = 1; - - // Mean of squared distances between each sample to its cluster centroid. - google.protobuf.DoubleValue mean_squared_distance = 2; - - // [Beta] Information for all clusters. - repeated Cluster clusters = 3; - } - - // Evaluation metrics of a model. These are either computed on all training - // data or just the eval data based on whether eval data was used during - // training. These are not present for imported models. - message EvaluationMetrics { - oneof metrics { - // Populated for regression models and explicit feedback type matrix - // factorization models. - RegressionMetrics regression_metrics = 1; - - // Populated for binary classification/classifier models. - BinaryClassificationMetrics binary_classification_metrics = 2; - - // Populated for multi-class classification/classifier models. 
-      MultiClassClassificationMetrics multi_class_classification_metrics = 3;
-
-      // Populated for clustering models.
-      ClusteringMetrics clustering_metrics = 4;
-    }
-  }
-
-  // Information about a single training query run for the model.
-  message TrainingRun {
-    message TrainingOptions {
-      // The maximum number of iterations in training. Used only for iterative
-      // training algorithms.
-      int64 max_iterations = 1;
-
-      // Type of loss function used during training run.
-      LossType loss_type = 2;
-
-      // Learning rate in training. Used only for iterative training algorithms.
-      double learn_rate = 3;
-
-      // L1 regularization coefficient.
-      google.protobuf.DoubleValue l1_regularization = 4;
-
-      // L2 regularization coefficient.
-      google.protobuf.DoubleValue l2_regularization = 5;
-
-      // When early_stop is true, stops training when accuracy improvement is
-      // less than 'min_relative_progress'. Used only for iterative training
-      // algorithms.
-      google.protobuf.DoubleValue min_relative_progress = 6;
-
-      // Whether to train a model from the last checkpoint.
-      google.protobuf.BoolValue warm_start = 7;
-
-      // Whether to stop early when the loss doesn't improve significantly
-      // any more (compared to min_relative_progress). Used only for iterative
-      // training algorithms.
-      google.protobuf.BoolValue early_stop = 8;
-
-      // Name of input label columns in training data.
-      repeated string input_label_columns = 9;
-
-      // The data split type for training and evaluation, e.g. RANDOM.
-      DataSplitMethod data_split_method = 10;
-
-      // The fraction of evaluation data over the whole input data. The rest
-      // of data will be used as training data. The format should be double.
-      // Accurate to two decimal places.
-      // Default value is 0.2.
-      double data_split_eval_fraction = 11;
-
-      // The column to split data with. This column won't be used as a
-      // feature.
-      // 1. When data_split_method is CUSTOM, the corresponding column should
-      // be boolean. The rows with true value tag are eval data, and the false
-      // are training data.
-      // 2. When data_split_method is SEQ, the first DATA_SPLIT_EVAL_FRACTION
-      // rows (from smallest to largest) in the corresponding column are used
-      // as training data, and the rest are eval data. It respects the order
-      // in Orderable data types:
-      // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type-properties
-      string data_split_column = 12;
-
-      // The strategy to determine learn rate for the current iteration.
-      LearnRateStrategy learn_rate_strategy = 13;
-
-      // Specifies the initial learning rate for the line search learn rate
-      // strategy.
-      double initial_learn_rate = 16;
-
-      // Weights associated with each label class, for rebalancing the
-      // training data. Only applicable for classification models.
-      map<string, double> label_class_weights = 17;
-
-      // Distance type for clustering models.
-      DistanceType distance_type = 20;
-
-      // Number of clusters for clustering models.
-      int64 num_clusters = 21;
-
-      // [Beta] Google Cloud Storage URI from which the model was imported. Only
-      // applicable for imported models.
-      string model_uri = 22;
-
-      // Optimization strategy for training linear regression models.
-      OptimizationStrategy optimization_strategy = 23;
-
-      // The method used to initialize the centroids for kmeans algorithm.
-      KmeansEnums.KmeansInitializationMethod kmeans_initialization_method = 33;
-
-      // The column used to provide the initial centroids for kmeans algorithm
-      // when kmeans_initialization_method is CUSTOM.
-      string kmeans_initialization_column = 34;
-    }
-
-    // Information about a single iteration of the training run.
-    message IterationResult {
-      // Information about a single cluster for clustering model.
-      message ClusterInfo {
-        // Centroid id.
-        int64 centroid_id = 1;
-
-        // Cluster radius, the average distance from centroid
-        // to each point assigned to the cluster.
-        google.protobuf.DoubleValue cluster_radius = 2;
-
-        // Cluster size, the total number of points assigned to the cluster.
-        google.protobuf.Int64Value cluster_size = 3;
-      }
-
-      // Index of the iteration, 0 based.
-      google.protobuf.Int32Value index = 1;
-
-      // Time taken to run the iteration in milliseconds.
-      google.protobuf.Int64Value duration_ms = 4;
-
-      // Loss computed on the training data at the end of iteration.
-      google.protobuf.DoubleValue training_loss = 5;
-
-      // Loss computed on the eval data at the end of iteration.
-      google.protobuf.DoubleValue eval_loss = 6;
-
-      // Learn rate used for this iteration.
-      double learn_rate = 7;
-
-      // Information about top clusters for clustering models.
-      repeated ClusterInfo cluster_infos = 8;
-    }
-
-    // Options that were used for this training run, includes
-    // user specified and default options that were used.
-    TrainingOptions training_options = 1;
-
-    // The start time of this training run.
-    google.protobuf.Timestamp start_time = 8;
-
-    // Output of each iteration run, results.size() <= max_iterations.
-    repeated IterationResult results = 6;
-
-    // The evaluation metrics over training/eval data that were computed at the
-    // end of training.
-    EvaluationMetrics evaluation_metrics = 7;
-  }
-
-  // Indicates the type of the Model.
-  enum ModelType {
-    MODEL_TYPE_UNSPECIFIED = 0;
-
-    // Linear regression model.
-    LINEAR_REGRESSION = 1;
-
-    // Logistic regression based classification model.
-    LOGISTIC_REGRESSION = 2;
-
-    // K-means clustering model.
-    KMEANS = 3;
-
-    // [Beta] An imported TensorFlow model.
-    TENSORFLOW = 6;
-  }
-
-  // Loss metric to evaluate model training performance.
-  enum LossType {
-    LOSS_TYPE_UNSPECIFIED = 0;
-
-    // Mean squared loss, used for linear regression.
-    MEAN_SQUARED_LOSS = 1;
-
-    // Mean log loss, used for logistic regression.
-    MEAN_LOG_LOSS = 2;
-  }
-
-  // Distance metric used to compute the distance between two points.
-  enum DistanceType {
-    DISTANCE_TYPE_UNSPECIFIED = 0;
-
-    // Euclidean distance.
-    EUCLIDEAN = 1;
-
-    // Cosine distance.
-    COSINE = 2;
-  }
-
-  // Indicates the method to split input data into multiple tables.
-  enum DataSplitMethod {
-    DATA_SPLIT_METHOD_UNSPECIFIED = 0;
-
-    // Splits data randomly.
-    RANDOM = 1;
-
-    // Splits data with the user provided tags.
-    CUSTOM = 2;
-
-    // Splits data sequentially.
-    SEQUENTIAL = 3;
-
-    // Data split will be skipped.
-    NO_SPLIT = 4;
-
-    // Splits data automatically: Uses NO_SPLIT if the data size is small.
-    // Otherwise uses RANDOM.
-    AUTO_SPLIT = 5;
-  }
-
-  // Indicates the learning rate optimization strategy to use.
-  enum LearnRateStrategy {
-    LEARN_RATE_STRATEGY_UNSPECIFIED = 0;
-
-    // Use line search to determine learning rate.
-    LINE_SEARCH = 1;
-
-    // Use a constant learning rate.
-    CONSTANT = 2;
-  }
-
-  // Indicates the optimization strategy used for training.
-  enum OptimizationStrategy {
-    OPTIMIZATION_STRATEGY_UNSPECIFIED = 0;
-
-    // Uses an iterative batch gradient descent algorithm.
-    BATCH_GRADIENT_DESCENT = 1;
-
-    // Uses a normal equation to solve linear regression problem.
-    NORMAL_EQUATION = 2;
-  }
-
-  // Output only. A hash of this resource.
-  string etag = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Required. Unique identifier for this model.
-  ModelReference model_reference = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Output only. The time when this model was created, in millisecs since the epoch.
-  int64 creation_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. The time when this model was last modified, in millisecs since the epoch.
-  int64 last_modified_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Optional. A user-friendly description of this model.
-  string description = 12 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. A descriptive name for this model.
-  string friendly_name = 14 [(google.api.field_behavior) = OPTIONAL];
-
-  // The labels associated with this model. You can use these to organize
-  // and group your models. Label keys and values can be no longer
-  // than 63 characters, can only contain lowercase letters, numeric
-  // characters, underscores and dashes. International characters are allowed.
-  // Label values are optional. Label keys must start with a letter and each
-  // label in the list must have a different key.
-  map<string, string> labels = 15;
-
-  // Optional. The time when this model expires, in milliseconds since the epoch.
-  // If not present, the model will persist indefinitely. Expired models
-  // will be deleted and their storage reclaimed. The defaultTableExpirationMs
-  // property of the encapsulating dataset can be used to set a default
-  // expirationTime on newly created models.
-  int64 expiration_time = 16 [(google.api.field_behavior) = OPTIONAL];
-
-  // Output only. The geographic location where the model resides. This value
-  // is inherited from the dataset.
-  string location = 13 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Custom encryption configuration (e.g., Cloud KMS keys). This shows the
-  // encryption configuration of the model data while stored in BigQuery
-  // storage.
-  google.cloud.bigquery.v2.EncryptionConfiguration encryption_configuration = 17;
-
-  // Output only. Type of the model resource.
-  ModelType model_type = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. Information for all training runs in increasing order of start_time.
-  repeated TrainingRun training_runs = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. Input feature columns that were used to train this model.
-  repeated StandardSqlField feature_columns = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Output only. Label columns that were used to train this model.
-  // The output of the model will have a "predicted_" prefix to these columns.
-  repeated StandardSqlField label_columns = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
-}
-
-message GetModelRequest {
-  // Required. Project ID of the requested model.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Dataset ID of the requested model.
-  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Model ID of the requested model.
-  string model_id = 3 [(google.api.field_behavior) = REQUIRED];
-}
-
-message PatchModelRequest {
-  // Required. Project ID of the model to patch.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Dataset ID of the model to patch.
-  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Model ID of the model to patch.
-  string model_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Patched model.
-  // Follows RFC5789 patch semantics. Missing fields are not updated.
-  // To clear a field, explicitly set to default value.
-  Model model = 4 [(google.api.field_behavior) = REQUIRED];
-}
-
-message DeleteModelRequest {
-  // Required. Project ID of the model to delete.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Dataset ID of the model to delete.
-  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Model ID of the model to delete.
-  string model_id = 3 [(google.api.field_behavior) = REQUIRED];
-}
-
-message ListModelsRequest {
-  // Required. Project ID of the models to list.
-  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Dataset ID of the models to list.
-  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // The maximum number of results to return in a single response page.
-  // Leverage the page tokens to iterate through the entire collection.
-  google.protobuf.UInt32Value max_results = 3;
-
-  // Page token, returned by a previous call to request the next page of
-  // results.
-  string page_token = 4;
-}
-
-message ListModelsResponse {
-  // Models in the requested dataset. Only the following fields are populated:
-  // model_reference, model_type, creation_time, last_modified_time and
-  // labels.
-  repeated Model models = 1;
-
-  // A token to request the next page of results.
-  string next_page_token = 2;
-}
diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py
deleted file mode 100644
index 0b4e9d23ed26..000000000000
--- a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py
+++ /dev/null
@@ -1,4087 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
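The same PATCH semantics behind `PatchModelRequest` are exposed by the handwritten client's `Client.update_model`, which sends only the named fields, in line with the RFC 5789 behavior noted above; a sketch (the model path is a placeholder, and application default credentials are assumed):

```python
from google.cloud import bigquery

client = bigquery.Client()
model = client.get_model("my-project.my_dataset.my_model")  # placeholder ID

model.description = "One-line description of the model."
# Only the fields listed here are sent in the PATCH body; everything else
# on the server-side model is left untouched.
model = client.update_model(model, ["description"])
print(model.description)
```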
-# source: google/cloud/bigquery_v2/proto/model.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.bigquery_v2.proto import ( - encryption_config_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2, -) -from google.cloud.bigquery_v2.proto import ( - model_reference_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2, -) -from google.cloud.bigquery_v2.proto import ( - standard_sql_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_standard__sql__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_v2/proto/model.proto", - package="google.cloud.bigquery.v2", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" - ), - serialized_pb=_b( - '\n*google/cloud/bigquery_v2/proto/model.proto\x12\x18google.cloud.bigquery.v2\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x36google/cloud/bigquery_v2/proto/encryption_config.proto\x1a\x34google/cloud/bigquery_v2/proto/model_reference.proto\x1a\x31google/cloud/bigquery_v2/proto/standard_sql.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\x9b\x35\n\x05Model\x12\x11\n\x04\x65tag\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x46\n\x0fmodel_reference\x18\x02 \x01(\x0b\x32(.google.cloud.bigquery.v2.ModelReferenceB\x03\xe0\x41\x02\x12\x1a\n\rcreation_time\x18\x05 \x01(\x03\x42\x03\xe0\x41\x03\x12\x1f\n\x12last_modified_time\x18\x06 \x01(\x03\x42\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rfriendly_name\x18\x0e \x01(\tB\x03\xe0\x41\x01\x12;\n\x06labels\x18\x0f \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.LabelsEntry\x12\x1c\n\x0f\x65xpiration_time\x18\x10 \x01(\x03\x42\x03\xe0\x41\x01\x12\x15\n\x08location\x18\r \x01(\tB\x03\xe0\x41\x03\x12S\n\x18\x65ncryption_configuration\x18\x11 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.EncryptionConfiguration\x12\x42\n\nmodel_type\x18\x07 \x01(\x0e\x32).google.cloud.bigquery.v2.Model.ModelTypeB\x03\xe0\x41\x03\x12G\n\rtraining_runs\x18\t \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.TrainingRunB\x03\xe0\x41\x03\x12H\n\x0f\x66\x65\x61ture_columns\x18\n \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x12\x46\n\rlabel_columns\x18\x0b 
\x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x1aq\n\x0bKmeansEnums"b\n\x1aKmeansInitializationMethod\x12,\n(KMEANS_INITIALIZATION_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x1a\xb4\x02\n\x11RegressionMetrics\x12\x39\n\x13mean_absolute_error\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x38\n\x12mean_squared_error\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12<\n\x16mean_squared_log_error\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15median_absolute_error\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tr_squared\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\xef\x02\n\x1e\x41ggregateClassificationMetrics\x12/\n\tprecision\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tthreshold\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08log_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12-\n\x07roc_auc\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x9f\x06\n\x1b\x42inaryClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12w\n\x1c\x62inary_confusion_matrix_list\x18\x02 \x03(\x0b\x32Q.google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix\x12\x16\n\x0epositive_label\x18\x03 \x01(\t\x12\x16\n\x0enegative_label\x18\x04 \x01(\t\x1a\xec\x03\n\x15\x42inaryConfusionMatrix\x12>\n\x18positive_class_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x33\n\x0etrue_positives\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_positives\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0etrue_negatives\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_negatives\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\tprecision\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x87\x05\n\x1fMultiClassClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12n\n\x15\x63onfusion_matrix_list\x18\x02 \x03(\x0b\x32O.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix\x1a\x89\x03\n\x0f\x43onfusionMatrix\x12:\n\x14\x63onfidence_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x61\n\x04rows\x18\x02 \x03(\x0b\x32S.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row\x1aQ\n\x05\x45ntry\x12\x17\n\x0fpredicted_label\x18\x01 \x01(\t\x12/\n\nitem_count\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\x83\x01\n\x03Row\x12\x14\n\x0c\x61\x63tual_label\x18\x01 \x01(\t\x12\x66\n\x07\x65ntries\x18\x02 \x03(\x0b\x32U.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry\x1a\xcb\x06\n\x11\x43lusteringMetrics\x12:\n\x14\x64\x61vies_bouldin_index\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15mean_squared_distance\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12K\n\x08\x63lusters\x18\x03 \x03(\x0b\x32\x39.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster\x1a\xef\x04\n\x07\x43luster\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12^\n\x0e\x66\x65\x61ture_values\x18\x02 \x03(\x0b\x32\x46.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue\x12*\n\x05\x63ount\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\xc2\x03\n\x0c\x46\x65\x61tureValue\x12\x16\n\x0e\x66\x65\x61ture_column\x18\x01 \x01(\t\x12\x37\n\x0fnumerical_value\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueH\x00\x12t\n\x11\x63\x61tegorical_value\x18\x03 \x01(\x0b\x32W.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValueH\x00\x1a\xe1\x01\n\x10\x43\x61tegoricalValue\x12~\n\x0f\x63\x61tegory_counts\x18\x01 \x03(\x0b\x32\x65.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount\x1aM\n\rCategoryCount\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12*\n\x05\x63ount\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\n\x05value\x1a\x95\x03\n\x11\x45valuationMetrics\x12O\n\x12regression_metrics\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.RegressionMetricsH\x00\x12\x64\n\x1d\x62inary_classification_metrics\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.BinaryClassificationMetricsH\x00\x12m\n"multi_class_classification_metrics\x18\x03 \x01(\x0b\x32?.google.cloud.bigquery.v2.Model.MultiClassClassificationMetricsH\x00\x12O\n\x12\x63lustering_metrics\x18\x04 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.ClusteringMetricsH\x00\x42\t\n\x07metrics\x1a\xab\x0f\n\x0bTrainingRun\x12U\n\x10training_options\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions\x12.\n\nstart_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\x07results\x18\x06 \x03(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult\x12M\n\x12\x65valuation_metrics\x18\x07 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.EvaluationMetrics\x1a\x9d\t\n\x0fTrainingOptions\x12\x16\n\x0emax_iterations\x18\x01 \x01(\x03\x12;\n\tloss_type\x18\x02 \x01(\x0e\x32(.google.cloud.bigquery.v2.Model.LossType\x12\x12\n\nlearn_rate\x18\x03 \x01(\x01\x12\x37\n\x11l1_regularization\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x37\n\x11l2_regularization\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15min_relative_progress\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\nwarm_start\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nearly_stop\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13input_label_columns\x18\t \x03(\t\x12J\n\x11\x64\x61ta_split_method\x18\n \x01(\x0e\x32/.google.cloud.bigquery.v2.Model.DataSplitMethod\x12 \n\x18\x64\x61ta_split_eval_fraction\x18\x0b \x01(\x01\x12\x19\n\x11\x64\x61ta_split_column\x18\x0c \x01(\t\x12N\n\x13learn_rate_strategy\x18\r \x01(\x0e\x32\x31.google.cloud.bigquery.v2.Model.LearnRateStrategy\x12\x1a\n\x12initial_learn_rate\x18\x10 \x01(\x01\x12o\n\x13label_class_weights\x18\x11 \x03(\x0b\x32R.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry\x12\x43\n\rdistance_type\x18\x14 \x01(\x0e\x32,.google.cloud.bigquery.v2.Model.DistanceType\x12\x14\n\x0cnum_clusters\x18\x15 \x01(\x03\x12\x11\n\tmodel_uri\x18\x16 \x01(\t\x12S\n\x15optimization_strategy\x18\x17 \x01(\x0e\x32\x34.google.cloud.bigquery.v2.Model.OptimizationStrategy\x12l\n\x1ckmeans_initialization_method\x18! 
\x01(\x0e\x32\x46.google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod\x12$\n\x1ckmeans_initialization_column\x18" \x01(\t\x1a\x38\n\x16LabelClassWeightsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\xd7\x03\n\x0fIterationResult\x12*\n\x05index\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0b\x64uration_ms\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\rtraining_loss\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\teval_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x12\n\nlearn_rate\x18\x07 \x01(\x01\x12^\n\rcluster_infos\x18\x08 \x03(\x0b\x32G.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo\x1a\x8b\x01\n\x0b\x43lusterInfo\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12\x34\n\x0e\x63luster_radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x31\n\x0c\x63luster_size\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"s\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11LINEAR_REGRESSION\x10\x01\x12\x17\n\x13LOGISTIC_REGRESSION\x10\x02\x12\n\n\x06KMEANS\x10\x03\x12\x0e\n\nTENSORFLOW\x10\x06"O\n\x08LossType\x12\x19\n\x15LOSS_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MEAN_SQUARED_LOSS\x10\x01\x12\x11\n\rMEAN_LOG_LOSS\x10\x02"H\n\x0c\x44istanceType\x12\x1d\n\x19\x44ISTANCE_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tEUCLIDEAN\x10\x01\x12\n\n\x06\x43OSINE\x10\x02"z\n\x0f\x44\x61taSplitMethod\x12!\n\x1d\x44\x41TA_SPLIT_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x12\x0e\n\nSEQUENTIAL\x10\x03\x12\x0c\n\x08NO_SPLIT\x10\x04\x12\x0e\n\nAUTO_SPLIT\x10\x05"W\n\x11LearnRateStrategy\x12#\n\x1fLEARN_RATE_STRATEGY_UNSPECIFIED\x10\x00\x12\x0f\n\x0bLINE_SEARCH\x10\x01\x12\x0c\n\x08\x43ONSTANT\x10\x02"n\n\x14OptimizationStrategy\x12%\n!OPTIMIZATION_STRATEGY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x42\x41TCH_GRADIENT_DESCENT\x10\x01\x12\x13\n\x0fNORMAL_EQUATION\x10\x02"Z\n\x0fGetModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x91\x01\n\x11PatchModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x05model\x18\x04 \x01(\x0b\x32\x1f.google.cloud.bigquery.v2.ModelB\x03\xe0\x41\x02"]\n\x12\x44\x65leteModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x8c\x01\n\x11ListModelsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x31\n\x0bmax_results\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x12\n\npage_token\x18\x04 \x01(\t"^\n\x12ListModelsResponse\x12/\n\x06models\x18\x01 \x03(\x0b\x32\x1f.google.cloud.bigquery.v2.Model\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xfa\x05\n\x0cModelService\x12y\n\x08GetModel\x12).google.cloud.bigquery.v2.GetModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"!\xda\x41\x1eproject_id,dataset_id,model_id\x12\x8d\x01\n\nListModels\x12+.google.cloud.bigquery.v2.ListModelsRequest\x1a,.google.cloud.bigquery.v2.ListModelsResponse"$\xda\x41!project_id,dataset_id,max_results\x12\x83\x01\n\nPatchModel\x12+.google.cloud.bigquery.v2.PatchModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\'\xda\x41$project_id,dataset_id,model_id,model\x12v\n\x0b\x44\x65leteModel\x12,.google.cloud.bigquery.v2.DeleteModelRequest\x1a\x16.google.protobuf.Empty"!\xda\x41\x1eproject_id,dataset_id,model_id\x1a\xe0\x01\xca\x41\x17\x62igquery.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-onlyBl\n\x1c\x63om.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_standard__sql__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD = _descriptor.EnumDescriptor( - name="KmeansInitializationMethod", - full_name="google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="KMEANS_INITIALIZATION_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="RANDOM", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CUSTOM", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1132, - serialized_end=1230, -) -_sym_db.RegisterEnumDescriptor(_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD) - -_MODEL_MODELTYPE = _descriptor.EnumDescriptor( - name="ModelType", - full_name="google.cloud.bigquery.v2.Model.ModelType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="MODEL_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LINEAR_REGRESSION", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LOGISTIC_REGRESSION", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="KMEANS", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TENSORFLOW", index=4, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6632, - serialized_end=6747, -) -_sym_db.RegisterEnumDescriptor(_MODEL_MODELTYPE) - -_MODEL_LOSSTYPE = _descriptor.EnumDescriptor( - name="LossType", - full_name="google.cloud.bigquery.v2.Model.LossType", - filename=None, - 
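# A quick usage sketch for the enums registered above (assuming this generated
# module is importable at its repo path, google.cloud.bigquery_v2.proto.model_pb2):
#
#   from google.cloud.bigquery_v2.proto import model_pb2
#   model_pb2.Model.KMEANS                     # enum values are class attributes -> 3
#   model_pb2.Model.ModelType.Name(3)          # -> "KMEANS"
#   model_pb2.Model.ModelType.Value("KMEANS")  # -> 3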
file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LOSS_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MEAN_SQUARED_LOSS", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MEAN_LOG_LOSS", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6749, - serialized_end=6828, -) -_sym_db.RegisterEnumDescriptor(_MODEL_LOSSTYPE) - -_MODEL_DISTANCETYPE = _descriptor.EnumDescriptor( - name="DistanceType", - full_name="google.cloud.bigquery.v2.Model.DistanceType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DISTANCE_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="EUCLIDEAN", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COSINE", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6830, - serialized_end=6902, -) -_sym_db.RegisterEnumDescriptor(_MODEL_DISTANCETYPE) - -_MODEL_DATASPLITMETHOD = _descriptor.EnumDescriptor( - name="DataSplitMethod", - full_name="google.cloud.bigquery.v2.Model.DataSplitMethod", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DATA_SPLIT_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="RANDOM", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CUSTOM", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SEQUENTIAL", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NO_SPLIT", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AUTO_SPLIT", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6904, - serialized_end=7026, -) -_sym_db.RegisterEnumDescriptor(_MODEL_DATASPLITMETHOD) - -_MODEL_LEARNRATESTRATEGY = _descriptor.EnumDescriptor( - name="LearnRateStrategy", - full_name="google.cloud.bigquery.v2.Model.LearnRateStrategy", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LEARN_RATE_STRATEGY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LINE_SEARCH", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CONSTANT", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=7028, - serialized_end=7115, -) -_sym_db.RegisterEnumDescriptor(_MODEL_LEARNRATESTRATEGY) - -_MODEL_OPTIMIZATIONSTRATEGY = _descriptor.EnumDescriptor( - name="OptimizationStrategy", - full_name="google.cloud.bigquery.v2.Model.OptimizationStrategy", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="OPTIMIZATION_STRATEGY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="BATCH_GRADIENT_DESCENT", - index=1, - number=1, - serialized_options=None, - 
type=None, - ), - _descriptor.EnumValueDescriptor( - name="NORMAL_EQUATION", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=7117, - serialized_end=7227, -) -_sym_db.RegisterEnumDescriptor(_MODEL_OPTIMIZATIONSTRATEGY) - - -_MODEL_KMEANSENUMS = _descriptor.Descriptor( - name="KmeansEnums", - full_name="google.cloud.bigquery.v2.Model.KmeansEnums", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1117, - serialized_end=1230, -) - -_MODEL_REGRESSIONMETRICS = _descriptor.Descriptor( - name="RegressionMetrics", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="mean_absolute_error", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics.mean_absolute_error", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mean_squared_error", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics.mean_squared_error", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mean_squared_log_error", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics.mean_squared_log_error", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="median_absolute_error", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics.median_absolute_error", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="r_squared", - full_name="google.cloud.bigquery.v2.Model.RegressionMetrics.r_squared", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1233, - serialized_end=1541, -) - -_MODEL_AGGREGATECLASSIFICATIONMETRICS = _descriptor.Descriptor( - name="AggregateClassificationMetrics", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
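# The metric fields declared here are google.protobuf.DoubleValue wrappers
# rather than bare doubles, so "unset" is distinguishable from 0.0. A minimal
# sketch, under the same model_pb2 import assumption as above:
#
#   m = model_pb2.Model.AggregateClassificationMetrics()
#   m.HasField("precision")     # -> False (wrapper message not set)
#   m.precision.value = 0.87    # setting a subfield marks it present
#   m.HasField("precision")     # -> True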
name="precision", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.precision", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="recall", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.recall", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accuracy", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.accuracy", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="threshold", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.threshold", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="f1_score", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.f1_score", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="log_loss", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.log_loss", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="roc_auc", - full_name="google.cloud.bigquery.v2.Model.AggregateClassificationMetrics.roc_auc", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1544, - serialized_end=1911, -) - -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX = _descriptor.Descriptor( - name="BinaryConfusionMatrix", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="positive_class_threshold", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.positive_class_threshold", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="true_positives", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.true_positives", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="false_positives", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.false_positives", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="true_negatives", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.true_negatives", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="false_negatives", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.false_negatives", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="precision", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.precision", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="recall", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.recall", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="f1_score", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.f1_score", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="accuracy", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix.accuracy", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2221, - serialized_end=2713, -) - -_MODEL_BINARYCLASSIFICATIONMETRICS = _descriptor.Descriptor( - name="BinaryClassificationMetrics", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="aggregate_classification_metrics", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.aggregate_classification_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="binary_confusion_matrix_list", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.binary_confusion_matrix_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="positive_label", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.positive_label", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="negative_label", - full_name="google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.negative_label", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1914, - serialized_end=2713, -) - -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY = _descriptor.Descriptor( - name="Entry", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="predicted_label", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry.predicted_label", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="item_count", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry.item_count", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3148, - serialized_end=3229, -) - -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW = _descriptor.Descriptor( - name="Row", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="actual_label", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row.actual_label", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="entries", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row.entries", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3232, - serialized_end=3363, -) - -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX = _descriptor.Descriptor( - name="ConfusionMatrix", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="confidence_threshold", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.confidence_threshold", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rows", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.rows", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY, - _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2970, - serialized_end=3363, -) - -_MODEL_MULTICLASSCLASSIFICATIONMETRICS = _descriptor.Descriptor( - name="MultiClassClassificationMetrics", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="aggregate_classification_metrics", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.aggregate_classification_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="confusion_matrix_list", - full_name="google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.confusion_matrix_list", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2716, - serialized_end=3363, -) - -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT = _descriptor.Descriptor( - name="CategoryCount", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="category", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount.category", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="count", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount.count", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4123, - serialized_end=4200, -) - -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE = _descriptor.Descriptor( - name="CategoricalValue", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="category_counts", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.category_counts", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3975, 
- serialized_end=4200, -) - -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE = _descriptor.Descriptor( - name="FeatureValue", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="feature_column", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.feature_column", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="numerical_value", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.numerical_value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="categorical_value", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.categorical_value", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="value", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.value", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=3759, - serialized_end=4209, -) - -_MODEL_CLUSTERINGMETRICS_CLUSTER = _descriptor.Descriptor( - name="Cluster", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="centroid_id", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.centroid_id", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="feature_values", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.feature_values", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="count", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.count", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - 
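# FeatureValue (defined above) places numerical_value and categorical_value in
# the "value" oneof, so setting one clears the other. Sketch, under the same
# model_pb2 import assumption:
#
#   from google.protobuf import wrappers_pb2
#   fv = model_pb2.Model.ClusteringMetrics.Cluster.FeatureValue(
#       numerical_value=wrappers_pb2.DoubleValue(value=1.5)
#   )
#   fv.WhichOneof("value")      # -> "numerical_value"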
nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3586, - serialized_end=4209, -) - -_MODEL_CLUSTERINGMETRICS = _descriptor.Descriptor( - name="ClusteringMetrics", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="davies_bouldin_index", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.davies_bouldin_index", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mean_squared_distance", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.mean_squared_distance", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="clusters", - full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.clusters", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3366, - serialized_end=4209, -) - -_MODEL_EVALUATIONMETRICS = _descriptor.Descriptor( - name="EvaluationMetrics", - full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="regression_metrics", - full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.regression_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="binary_classification_metrics", - full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.binary_classification_metrics", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="multi_class_classification_metrics", - full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.multi_class_classification_metrics", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="clustering_metrics", - 
full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.clustering_metrics", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="metrics", - full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.metrics", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=4212, - serialized_end=4617, -) - -_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY = _descriptor.Descriptor( - name="LabelClassWeightsEntry", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry.value", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6053, - serialized_end=6109, -) - -_MODEL_TRAININGRUN_TRAININGOPTIONS = _descriptor.Descriptor( - name="TrainingOptions", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="max_iterations", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.max_iterations", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="loss_type", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.loss_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="learn_rate", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.learn_rate", - index=2, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), 
- _descriptor.FieldDescriptor( - name="l1_regularization", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.l1_regularization", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="l2_regularization", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.l2_regularization", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="min_relative_progress", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.min_relative_progress", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="warm_start", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.warm_start", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="early_stop", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.early_stop", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="input_label_columns", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.input_label_columns", - index=8, - number=9, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_split_method", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.data_split_method", - index=9, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_split_eval_fraction", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.data_split_eval_fraction", - index=10, - number=11, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_split_column", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.data_split_column", - index=11, - number=12, - type=9, - cpp_type=9, 
- label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="learn_rate_strategy", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.learn_rate_strategy", - index=12, - number=13, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="initial_learn_rate", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.initial_learn_rate", - index=13, - number=16, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_class_weights", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.label_class_weights", - index=14, - number=17, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distance_type", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.distance_type", - index=15, - number=20, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_clusters", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.num_clusters", - index=16, - number=21, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_uri", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.model_uri", - index=17, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="optimization_strategy", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.optimization_strategy", - index=18, - number=23, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kmeans_initialization_method", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.kmeans_initialization_method", - index=19, - number=33, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kmeans_initialization_column", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.kmeans_initialization_column", - index=20, - number=34, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4928, - serialized_end=6109, -) - -_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO = _descriptor.Descriptor( - name="ClusterInfo", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="centroid_id", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo.centroid_id", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_radius", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo.cluster_radius", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_size", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo.cluster_size", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6444, - serialized_end=6583, -) - -_MODEL_TRAININGRUN_ITERATIONRESULT = _descriptor.Descriptor( - name="IterationResult", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="index", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.index", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="duration_ms", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.duration_ms", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="training_loss", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.training_loss", - index=2, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="eval_loss", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.eval_loss", - index=3, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="learn_rate", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.learn_rate", - index=4, - number=7, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cluster_infos", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.cluster_infos", - index=5, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6112, - serialized_end=6583, -) - -_MODEL_TRAININGRUN = _descriptor.Descriptor( - name="TrainingRun", - full_name="google.cloud.bigquery.v2.Model.TrainingRun", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="training_options", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.training_options", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.start_time", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="results", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.results", - index=2, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="evaluation_metrics", - full_name="google.cloud.bigquery.v2.Model.TrainingRun.evaluation_metrics", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _MODEL_TRAININGRUN_TRAININGOPTIONS, - _MODEL_TRAININGRUN_ITERATIONRESULT, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4620, - serialized_end=6583, -) - -_MODEL_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.cloud.bigquery.v2.Model.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.bigquery.v2.Model.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.bigquery.v2.Model.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6585, - serialized_end=6630, -) - -_MODEL = _descriptor.Descriptor( - name="Model", - full_name="google.cloud.bigquery.v2.Model", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="etag", - full_name="google.cloud.bigquery.v2.Model.etag", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_reference", - full_name="google.cloud.bigquery.v2.Model.model_reference", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="creation_time", - full_name="google.cloud.bigquery.v2.Model.creation_time", - index=2, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last_modified_time", - full_name="google.cloud.bigquery.v2.Model.last_modified_time", - index=3, - number=6, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.v2.Model.description", - index=4, - number=12, - type=9, 
- cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="friendly_name", - full_name="google.cloud.bigquery.v2.Model.friendly_name", - index=5, - number=14, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.cloud.bigquery.v2.Model.labels", - index=6, - number=15, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expiration_time", - full_name="google.cloud.bigquery.v2.Model.expiration_time", - index=7, - number=16, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location", - full_name="google.cloud.bigquery.v2.Model.location", - index=8, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="encryption_configuration", - full_name="google.cloud.bigquery.v2.Model.encryption_configuration", - index=9, - number=17, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_type", - full_name="google.cloud.bigquery.v2.Model.model_type", - index=10, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="training_runs", - full_name="google.cloud.bigquery.v2.Model.training_runs", - index=11, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="feature_columns", - full_name="google.cloud.bigquery.v2.Model.feature_columns", - index=12, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_columns", - full_name="google.cloud.bigquery.v2.Model.label_columns", - index=13, 
- number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _MODEL_KMEANSENUMS, - _MODEL_REGRESSIONMETRICS, - _MODEL_AGGREGATECLASSIFICATIONMETRICS, - _MODEL_BINARYCLASSIFICATIONMETRICS, - _MODEL_MULTICLASSCLASSIFICATIONMETRICS, - _MODEL_CLUSTERINGMETRICS, - _MODEL_EVALUATIONMETRICS, - _MODEL_TRAININGRUN, - _MODEL_LABELSENTRY, - ], - enum_types=[ - _MODEL_MODELTYPE, - _MODEL_LOSSTYPE, - _MODEL_DISTANCETYPE, - _MODEL_DATASPLITMETHOD, - _MODEL_LEARNRATESTRATEGY, - _MODEL_OPTIMIZATIONSTRATEGY, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=416, - serialized_end=7227, -) - - -_GETMODELREQUEST = _descriptor.Descriptor( - name="GetModelRequest", - full_name="google.cloud.bigquery.v2.GetModelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.v2.GetModelRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.v2.GetModelRequest.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_id", - full_name="google.cloud.bigquery.v2.GetModelRequest.model_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7229, - serialized_end=7319, -) - - -_PATCHMODELREQUEST = _descriptor.Descriptor( - name="PatchModelRequest", - full_name="google.cloud.bigquery.v2.PatchModelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.v2.PatchModelRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.v2.PatchModelRequest.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_id", - full_name="google.cloud.bigquery.v2.PatchModelRequest.model_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model", - full_name="google.cloud.bigquery.v2.PatchModelRequest.model", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7322, - serialized_end=7467, -) - - -_DELETEMODELREQUEST = _descriptor.Descriptor( - name="DeleteModelRequest", - full_name="google.cloud.bigquery.v2.DeleteModelRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.v2.DeleteModelRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.v2.DeleteModelRequest.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_id", - full_name="google.cloud.bigquery.v2.DeleteModelRequest.model_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7469, - serialized_end=7562, -) - - -_LISTMODELSREQUEST = _descriptor.Descriptor( - name="ListModelsRequest", - full_name="google.cloud.bigquery.v2.ListModelsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.v2.ListModelsRequest.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.v2.ListModelsRequest.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, 
- has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_results", - full_name="google.cloud.bigquery.v2.ListModelsRequest.max_results", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.v2.ListModelsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7565, - serialized_end=7705, -) - - -_LISTMODELSRESPONSE = _descriptor.Descriptor( - name="ListModelsResponse", - full_name="google.cloud.bigquery.v2.ListModelsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="models", - full_name="google.cloud.bigquery.v2.ListModelsResponse.models", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.v2.ListModelsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7707, - serialized_end=7801, -) - -_MODEL_KMEANSENUMS.containing_type = _MODEL -_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD.containing_type = _MODEL_KMEANSENUMS -_MODEL_REGRESSIONMETRICS.fields_by_name[ - "mean_absolute_error" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_REGRESSIONMETRICS.fields_by_name[ - "mean_squared_error" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_REGRESSIONMETRICS.fields_by_name[ - "mean_squared_log_error" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_REGRESSIONMETRICS.fields_by_name[ - "median_absolute_error" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_REGRESSIONMETRICS.fields_by_name[ - "r_squared" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_REGRESSIONMETRICS.containing_type = _MODEL -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "precision" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "recall" -].message_type = 
google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "accuracy" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "threshold" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "f1_score" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "log_loss" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.fields_by_name[ - "roc_auc" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_AGGREGATECLASSIFICATIONMETRICS.containing_type = _MODEL -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "positive_class_threshold" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "true_positives" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "false_positives" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "true_negatives" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "false_negatives" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "precision" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "recall" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "f1_score" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.fields_by_name[ - "accuracy" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX.containing_type = ( - _MODEL_BINARYCLASSIFICATIONMETRICS -) -_MODEL_BINARYCLASSIFICATIONMETRICS.fields_by_name[ - "aggregate_classification_metrics" -].message_type = _MODEL_AGGREGATECLASSIFICATIONMETRICS -_MODEL_BINARYCLASSIFICATIONMETRICS.fields_by_name[ - "binary_confusion_matrix_list" -].message_type = _MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX -_MODEL_BINARYCLASSIFICATIONMETRICS.containing_type = _MODEL -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY.fields_by_name[ - "item_count" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY.containing_type = ( - _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX -) -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW.fields_by_name[ - "entries" -].message_type = _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW.containing_type = ( - _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX -) -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX.fields_by_name[ - "confidence_threshold" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE 
-_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX.fields_by_name[ - "rows" -].message_type = _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW -_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX.containing_type = ( - _MODEL_MULTICLASSCLASSIFICATIONMETRICS -) -_MODEL_MULTICLASSCLASSIFICATIONMETRICS.fields_by_name[ - "aggregate_classification_metrics" -].message_type = _MODEL_AGGREGATECLASSIFICATIONMETRICS -_MODEL_MULTICLASSCLASSIFICATIONMETRICS.fields_by_name[ - "confusion_matrix_list" -].message_type = _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX -_MODEL_MULTICLASSCLASSIFICATIONMETRICS.containing_type = _MODEL -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT.fields_by_name[ - "count" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT.containing_type = ( - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE.fields_by_name[ - "category_counts" -].message_type = ( - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE.containing_type = ( - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name[ - "numerical_value" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name[ - "categorical_value" -].message_type = _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.containing_type = ( - _MODEL_CLUSTERINGMETRICS_CLUSTER -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.oneofs_by_name["value"].fields.append( - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name["numerical_value"] -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name[ - "numerical_value" -].containing_oneof = _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.oneofs_by_name[ - "value" -] -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.oneofs_by_name["value"].fields.append( - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name["categorical_value"] -) -_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.fields_by_name[ - "categorical_value" -].containing_oneof = _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE.oneofs_by_name[ - "value" -] -_MODEL_CLUSTERINGMETRICS_CLUSTER.fields_by_name[ - "feature_values" -].message_type = _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE -_MODEL_CLUSTERINGMETRICS_CLUSTER.fields_by_name[ - "count" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_CLUSTERINGMETRICS_CLUSTER.containing_type = _MODEL_CLUSTERINGMETRICS -_MODEL_CLUSTERINGMETRICS.fields_by_name[ - "davies_bouldin_index" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_CLUSTERINGMETRICS.fields_by_name[ - "mean_squared_distance" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_CLUSTERINGMETRICS.fields_by_name[ - "clusters" -].message_type = _MODEL_CLUSTERINGMETRICS_CLUSTER -_MODEL_CLUSTERINGMETRICS.containing_type = _MODEL -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "regression_metrics" -].message_type = _MODEL_REGRESSIONMETRICS -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "binary_classification_metrics" -].message_type = _MODEL_BINARYCLASSIFICATIONMETRICS -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "multi_class_classification_metrics" -].message_type = 
_MODEL_MULTICLASSCLASSIFICATIONMETRICS -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "clustering_metrics" -].message_type = _MODEL_CLUSTERINGMETRICS -_MODEL_EVALUATIONMETRICS.containing_type = _MODEL -_MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - _MODEL_EVALUATIONMETRICS.fields_by_name["regression_metrics"] -) -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "regression_metrics" -].containing_oneof = _MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"] -_MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - _MODEL_EVALUATIONMETRICS.fields_by_name["binary_classification_metrics"] -) -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "binary_classification_metrics" -].containing_oneof = _MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"] -_MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - _MODEL_EVALUATIONMETRICS.fields_by_name["multi_class_classification_metrics"] -) -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "multi_class_classification_metrics" -].containing_oneof = _MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"] -_MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - _MODEL_EVALUATIONMETRICS.fields_by_name["clustering_metrics"] -) -_MODEL_EVALUATIONMETRICS.fields_by_name[ - "clustering_metrics" -].containing_oneof = _MODEL_EVALUATIONMETRICS.oneofs_by_name["metrics"] -_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY.containing_type = ( - _MODEL_TRAININGRUN_TRAININGOPTIONS -) -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "loss_type" -].enum_type = _MODEL_LOSSTYPE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "l1_regularization" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "l2_regularization" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "min_relative_progress" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "warm_start" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "early_stop" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "data_split_method" -].enum_type = _MODEL_DATASPLITMETHOD -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "learn_rate_strategy" -].enum_type = _MODEL_LEARNRATESTRATEGY -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "label_class_weights" -].message_type = _MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "distance_type" -].enum_type = _MODEL_DISTANCETYPE -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "optimization_strategy" -].enum_type = _MODEL_OPTIMIZATIONSTRATEGY -_MODEL_TRAININGRUN_TRAININGOPTIONS.fields_by_name[ - "kmeans_initialization_method" -].enum_type = _MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD -_MODEL_TRAININGRUN_TRAININGOPTIONS.containing_type = _MODEL_TRAININGRUN -_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO.fields_by_name[ - "cluster_radius" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO.fields_by_name[ - "cluster_size" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO.containing_type = ( - _MODEL_TRAININGRUN_ITERATIONRESULT -) -_MODEL_TRAININGRUN_ITERATIONRESULT.fields_by_name[ - "index" 
-].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE -_MODEL_TRAININGRUN_ITERATIONRESULT.fields_by_name[ - "duration_ms" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_MODEL_TRAININGRUN_ITERATIONRESULT.fields_by_name[ - "training_loss" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_ITERATIONRESULT.fields_by_name[ - "eval_loss" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_MODEL_TRAININGRUN_ITERATIONRESULT.fields_by_name[ - "cluster_infos" -].message_type = _MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO -_MODEL_TRAININGRUN_ITERATIONRESULT.containing_type = _MODEL_TRAININGRUN -_MODEL_TRAININGRUN.fields_by_name[ - "training_options" -].message_type = _MODEL_TRAININGRUN_TRAININGOPTIONS -_MODEL_TRAININGRUN.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_MODEL_TRAININGRUN.fields_by_name[ - "results" -].message_type = _MODEL_TRAININGRUN_ITERATIONRESULT -_MODEL_TRAININGRUN.fields_by_name[ - "evaluation_metrics" -].message_type = _MODEL_EVALUATIONMETRICS -_MODEL_TRAININGRUN.containing_type = _MODEL -_MODEL_LABELSENTRY.containing_type = _MODEL -_MODEL.fields_by_name[ - "model_reference" -].message_type = ( - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2._MODELREFERENCE -) -_MODEL.fields_by_name["labels"].message_type = _MODEL_LABELSENTRY -_MODEL.fields_by_name[ - "encryption_configuration" -].message_type = ( - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2._ENCRYPTIONCONFIGURATION -) -_MODEL.fields_by_name["model_type"].enum_type = _MODEL_MODELTYPE -_MODEL.fields_by_name["training_runs"].message_type = _MODEL_TRAININGRUN -_MODEL.fields_by_name[ - "feature_columns" -].message_type = ( - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_standard__sql__pb2._STANDARDSQLFIELD -) -_MODEL.fields_by_name[ - "label_columns" -].message_type = ( - google_dot_cloud_dot_bigquery__v2_dot_proto_dot_standard__sql__pb2._STANDARDSQLFIELD -) -_MODEL_MODELTYPE.containing_type = _MODEL -_MODEL_LOSSTYPE.containing_type = _MODEL -_MODEL_DISTANCETYPE.containing_type = _MODEL -_MODEL_DATASPLITMETHOD.containing_type = _MODEL -_MODEL_LEARNRATESTRATEGY.containing_type = _MODEL -_MODEL_OPTIMIZATIONSTRATEGY.containing_type = _MODEL -_PATCHMODELREQUEST.fields_by_name["model"].message_type = _MODEL -_LISTMODELSREQUEST.fields_by_name[ - "max_results" -].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE -_LISTMODELSRESPONSE.fields_by_name["models"].message_type = _MODEL -DESCRIPTOR.message_types_by_name["Model"] = _MODEL -DESCRIPTOR.message_types_by_name["GetModelRequest"] = _GETMODELREQUEST -DESCRIPTOR.message_types_by_name["PatchModelRequest"] = _PATCHMODELREQUEST -DESCRIPTOR.message_types_by_name["DeleteModelRequest"] = _DELETEMODELREQUEST -DESCRIPTOR.message_types_by_name["ListModelsRequest"] = _LISTMODELSREQUEST -DESCRIPTOR.message_types_by_name["ListModelsResponse"] = _LISTMODELSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Model = _reflection.GeneratedProtocolMessageType( - "Model", - (_message.Message,), - dict( - KmeansEnums=_reflection.GeneratedProtocolMessageType( - "KmeansEnums", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_KMEANSENUMS, - __module__="google.cloud.bigquery_v2.proto.model_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.KmeansEnums) - ), - ), - RegressionMetrics=_reflection.GeneratedProtocolMessageType( - "RegressionMetrics", - 
(_message.Message,), - dict( - DESCRIPTOR=_MODEL_REGRESSIONMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Evaluation metrics for regression and explicit feedback - type matrix factorization models. - - - Attributes: - mean_absolute_error: - Mean absolute error. - mean_squared_error: - Mean squared error. - mean_squared_log_error: - Mean squared log error. - median_absolute_error: - Median absolute error. - r_squared: - R^2 score. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.RegressionMetrics) - ), - ), - AggregateClassificationMetrics=_reflection.GeneratedProtocolMessageType( - "AggregateClassificationMetrics", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_AGGREGATECLASSIFICATIONMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Aggregate metrics for classification/classifier models. - For multi-class models, the metrics are either macro-averaged or - micro-averaged. When macro-averaged, the metrics are calculated for each - label and then an unweighted average is taken of those values. When - micro-averaged, the metric is calculated globally by counting the total - number of correctly predicted rows. - - - Attributes: - precision: - Precision is the fraction of positive predictions that had - positive actual labels. For multiclass this is a macro- - averaged metric treating each class as a binary classifier. - recall: - Recall is the fraction of actual positive labels that were - given a positive prediction. For multiclass this is a macro- - averaged metric. - accuracy: - Accuracy is the fraction of predictions given the correct - label. For multiclass this is a micro-averaged metric. - threshold: - Threshold at which the metrics are computed. For binary - classification models this is the positive class threshold. - For multi-class classification models this is the confidence - threshold. - f1_score: - The F1 score is an average of recall and precision. For - multiclass this is a macro-averaged metric. - log_loss: - Logarithmic Loss. For multiclass this is a macro-averaged - metric. - roc_auc: - Area Under a ROC Curve. For multiclass this is a macro- - averaged metric. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.AggregateClassificationMetrics) - ), - ), - BinaryClassificationMetrics=_reflection.GeneratedProtocolMessageType( - "BinaryClassificationMetrics", - (_message.Message,), - dict( - BinaryConfusionMatrix=_reflection.GeneratedProtocolMessageType( - "BinaryConfusionMatrix", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Confusion matrix for binary classification models. - - - Attributes: - positive_class_threshold: - Threshold value used when computing each of the following - metrics. - true_positives: - Number of true samples predicted as true. - false_positives: - Number of false samples predicted as true. - true_negatives: - Number of false samples predicted as false. - false_negatives: - Number of true samples predicted as false. - precision: - The fraction of positive predictions that had positive - actual labels. - recall: - The fraction of actual positive labels that were given a - positive prediction. - f1_score: - The equally weighted average of recall and precision. - accuracy: - The fraction of predictions given the correct label. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix) - ), - ), - DESCRIPTOR=_MODEL_BINARYCLASSIFICATIONMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Evaluation metrics for binary classification/classifier - models. - - - Attributes: - aggregate_classification_metrics: - Aggregate classification metrics. - binary_confusion_matrix_list: - Binary confusion matrix at multiple thresholds. - positive_label: - Label representing the positive class. - negative_label: - Label representing the negative class. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.BinaryClassificationMetrics) - ), - ), - MultiClassClassificationMetrics=_reflection.GeneratedProtocolMessageType( - "MultiClassClassificationMetrics", - (_message.Message,), - dict( - ConfusionMatrix=_reflection.GeneratedProtocolMessageType( - "ConfusionMatrix", - (_message.Message,), - dict( - Entry=_reflection.GeneratedProtocolMessageType( - "Entry", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""A single entry in the confusion matrix. - - - Attributes: - predicted_label: - The predicted label. For confidence\_threshold > 0, we will - also add an entry indicating the number of items under the - confidence threshold. - item_count: - Number of items being predicted as this label. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry) - ), - ), - Row=_reflection.GeneratedProtocolMessageType( - "Row", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""A single row in the confusion matrix. - - - Attributes: - actual_label: - The original label of this row. - entries: - Info describing predicted label distribution. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row) - ), - ), - DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Confusion matrix for multi-class classification models. - - - Attributes: - confidence_threshold: - Confidence threshold used when computing the entries of the - confusion matrix. - rows: - One row per actual label. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix) - ), - ), - DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Evaluation metrics for multi-class - classification/classifier models. - - - Attributes: - aggregate_classification_metrics: - Aggregate classification metrics. - confusion_matrix_list: - Confusion matrix at different thresholds. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics) - ), - ), - ClusteringMetrics=_reflection.GeneratedProtocolMessageType( - "ClusteringMetrics", - (_message.Message,), - dict( - Cluster=_reflection.GeneratedProtocolMessageType( - "Cluster", - (_message.Message,), - dict( - FeatureValue=_reflection.GeneratedProtocolMessageType( - "FeatureValue", - (_message.Message,), - dict( - CategoricalValue=_reflection.GeneratedProtocolMessageType( - "CategoricalValue", - (_message.Message,), - dict( - CategoryCount=_reflection.GeneratedProtocolMessageType( - "CategoryCount", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Represents the count of a single category within the - cluster. - - - Attributes: - category: - The name of category. - count: - The count of training samples matching the category within the - cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount) - ), - ), - DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Representative value of a categorical feature. - - - Attributes: - category_counts: - Counts of all categories for the categorical feature. If there - are more than ten categories, we return top ten (by count) and - return one more CategoryCount with category "*OTHER*" and - count as aggregate counts of remaining categories. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue) - ), - ), - DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Representative value of a single feature within the cluster. - - - Attributes: - feature_column: - The feature column name. - numerical_value: - The numerical feature value. This is the centroid value for - this feature. - categorical_value: - The categorical feature value. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue) - ), - ), - DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Message containing the information about one cluster. - - - Attributes: - centroid_id: - Centroid id. - feature_values: - Values of highly variant features for this cluster. - count: - Count of training data rows that were assigned to this - cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster) - ), - ), - DESCRIPTOR=_MODEL_CLUSTERINGMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Evaluation metrics for clustering models. - - - Attributes: - davies_bouldin_index: - Davies-Bouldin index. - mean_squared_distance: - Mean of squared distances between each sample to its cluster - centroid. - clusters: - [Beta] Information for all clusters. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics) - ), - ), - EvaluationMetrics=_reflection.GeneratedProtocolMessageType( - "EvaluationMetrics", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_EVALUATIONMETRICS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Evaluation metrics of a model. These are either computed - on all training data or just the eval data based on whether eval data - was used during training. These are not present for imported models. - - - Attributes: - regression_metrics: - Populated for regression models and explicit feedback type - matrix factorization models. - binary_classification_metrics: - Populated for binary classification/classifier models. - multi_class_classification_metrics: - Populated for multi-class classification/classifier models. - clustering_metrics: - Populated for clustering models. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.EvaluationMetrics) - ), - ), - TrainingRun=_reflection.GeneratedProtocolMessageType( - "TrainingRun", - (_message.Message,), - dict( - TrainingOptions=_reflection.GeneratedProtocolMessageType( - "TrainingOptions", - (_message.Message,), - dict( - LabelClassWeightsEntry=_reflection.GeneratedProtocolMessageType( - "LabelClassWeightsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY, - __module__="google.cloud.bigquery_v2.proto.model_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry) - ), - ), - DESCRIPTOR=_MODEL_TRAININGRUN_TRAININGOPTIONS, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - max_iterations: - The maximum number of iterations in training. Used only for - iterative training algorithms. - loss_type: - Type of loss function used during training run. - learn_rate: - Learning rate in training. Used only for iterative training - algorithms. - l1_regularization: - L1 regularization coefficient. - l2_regularization: - L2 regularization coefficient. - min_relative_progress: - When early\_stop is true, stops training when accuracy - improvement is less than 'min\_relative\_progress'. Used only - for iterative training algorithms. - warm_start: - Whether to train a model from the last checkpoint. - early_stop: - Whether to stop early when the loss doesn't improve - significantly any more (compared to min\_relative\_progress). - Used only for iterative training algorithms. - input_label_columns: - Name of input label columns in training data. - data_split_method: - The data split type for training and evaluation, e.g. RANDOM. - data_split_eval_fraction: - The fraction of evaluation data over the whole input data. The - rest of data will be used as training data. The format should - be double. Accurate to two decimal places. Default value is - 0.2. - data_split_column: - The column to split data with. This column won't be used as a - feature. 1. When data\_split\_method is CUSTOM, the - corresponding column should be boolean. The rows with true - value tag are eval data, and the false are training data. 2. - When data\_split\_method is SEQ, the first - DATA\_SPLIT\_EVAL\_FRACTION rows (from smallest to largest) in - the corresponding column are used as training data, and the - rest are eval data. 
It respects the order in Orderable data - types: - https://cloud.google.com/bigquery/docs/reference/standard- - sql/data-types#data-type-properties - learn_rate_strategy: - The strategy to determine learn rate for the current - iteration. - initial_learn_rate: - Specifies the initial learning rate for the line search learn - rate strategy. - label_class_weights: - Weights associated with each label class, for rebalancing the - training data. Only applicable for classification models. - distance_type: - Distance type for clustering models. - num_clusters: - Number of clusters for clustering models. - model_uri: - [Beta] Google Cloud Storage URI from which the model was - imported. Only applicable for imported models. - optimization_strategy: - Optimization strategy for training linear regression models. - kmeans_initialization_method: - The method used to initialize the centroids for kmeans - algorithm. - kmeans_initialization_column: - The column used to provide the initial centroids for kmeans - algorithm when kmeans\_initialization\_method is CUSTOM. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions) - ), - ), - IterationResult=_reflection.GeneratedProtocolMessageType( - "IterationResult", - (_message.Message,), - dict( - ClusterInfo=_reflection.GeneratedProtocolMessageType( - "ClusterInfo", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Information about a single cluster for clustering model. - - - Attributes: - centroid_id: - Centroid id. - cluster_radius: - Cluster radius, the average distance from centroid to each - point assigned to the cluster. - cluster_size: - Cluster size, the total number of points assigned to the - cluster. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo) - ), - ), - DESCRIPTOR=_MODEL_TRAININGRUN_ITERATIONRESULT, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Information about a single iteration of the training run. - - - Attributes: - index: - Index of the iteration, 0 based. - duration_ms: - Time taken to run the iteration in milliseconds. - training_loss: - Loss computed on the training data at the end of iteration. - eval_loss: - Loss computed on the eval data at the end of iteration. - learn_rate: - Learn rate used for this iteration. - cluster_infos: - Information about top clusters for clustering models. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.IterationResult) - ), - ), - DESCRIPTOR=_MODEL_TRAININGRUN, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Information about a single training query run for the - model. - - - Attributes: - training_options: - Options that were used for this training run, includes user - specified and default options that were used. - start_time: - The start time of this training run. - results: - Output of each iteration run, results.size() <= - max\_iterations. - evaluation_metrics: - The evaluation metrics over training/eval data that were - computed at the end of training. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun) - ), - ), - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_MODEL_LABELSENTRY, - __module__="google.cloud.bigquery_v2.proto.model_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.LabelsEntry) - ), - ), - DESCRIPTOR=_MODEL, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - etag: - Output only. A hash of this resource. - model_reference: - Required. Unique identifier for this model. - creation_time: - Output only. The time when this model was created, in - millisecs since the epoch. - last_modified_time: - Output only. The time when this model was last modified, in - millisecs since the epoch. - description: - Optional. A user-friendly description of this model. - friendly_name: - Optional. A descriptive name for this model. - labels: - The labels associated with this model. You can use these to - organize and group your models. Label keys and values can be - no longer than 63 characters, can only contain lowercase - letters, numeric characters, underscores and dashes. - International characters are allowed. Label values are - optional. Label keys must start with a letter and each label - in the list must have a different key. - expiration_time: - Optional. The time when this model expires, in milliseconds - since the epoch. If not present, the model will persist - indefinitely. Expired models will be deleted and their storage - reclaimed. The defaultTableExpirationMs property of the - encapsulating dataset can be used to set a default - expirationTime on newly created models. - location: - Output only. The geographic location where the model resides. - This value is inherited from the dataset. - encryption_configuration: - Custom encryption configuration (e.g., Cloud KMS keys). This - shows the encryption configuration of the model data while - stored in BigQuery storage. - model_type: - Output only. Type of the model resource. - training_runs: - Output only. Information for all training runs in increasing - order of start\_time. - feature_columns: - Output only. Input feature columns that were used to train - this model. - label_columns: - Output only. Label columns that were used to train this model. - The output of the model will have a "predicted\_" prefix to - these columns. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model) - ), -) -_sym_db.RegisterMessage(Model) -_sym_db.RegisterMessage(Model.KmeansEnums) -_sym_db.RegisterMessage(Model.RegressionMetrics) -_sym_db.RegisterMessage(Model.AggregateClassificationMetrics) -_sym_db.RegisterMessage(Model.BinaryClassificationMetrics) -_sym_db.RegisterMessage(Model.BinaryClassificationMetrics.BinaryConfusionMatrix) -_sym_db.RegisterMessage(Model.MultiClassClassificationMetrics) -_sym_db.RegisterMessage(Model.MultiClassClassificationMetrics.ConfusionMatrix) -_sym_db.RegisterMessage(Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry) -_sym_db.RegisterMessage(Model.MultiClassClassificationMetrics.ConfusionMatrix.Row) -_sym_db.RegisterMessage(Model.ClusteringMetrics) -_sym_db.RegisterMessage(Model.ClusteringMetrics.Cluster) -_sym_db.RegisterMessage(Model.ClusteringMetrics.Cluster.FeatureValue) -_sym_db.RegisterMessage(Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue) -_sym_db.RegisterMessage( - Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount -) -_sym_db.RegisterMessage(Model.EvaluationMetrics) -_sym_db.RegisterMessage(Model.TrainingRun) -_sym_db.RegisterMessage(Model.TrainingRun.TrainingOptions) -_sym_db.RegisterMessage(Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry) -_sym_db.RegisterMessage(Model.TrainingRun.IterationResult) -_sym_db.RegisterMessage(Model.TrainingRun.IterationResult.ClusterInfo) -_sym_db.RegisterMessage(Model.LabelsEntry) - -GetModelRequest = _reflection.GeneratedProtocolMessageType( - "GetModelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETMODELREQUEST, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - project_id: - Required. Project ID of the requested model. - dataset_id: - Required. Dataset ID of the requested model. - model_id: - Required. Model ID of the requested model. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.GetModelRequest) - ), -) -_sym_db.RegisterMessage(GetModelRequest) - -PatchModelRequest = _reflection.GeneratedProtocolMessageType( - "PatchModelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_PATCHMODELREQUEST, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - project_id: - Required. Project ID of the model to patch. - dataset_id: - Required. Dataset ID of the model to patch. - model_id: - Required. Model ID of the model to patch. - model: - Required. Patched model. Follows RFC5789 patch semantics. - Missing fields are not updated. To clear a field, explicitly - set to default value. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.PatchModelRequest) - ), -) -_sym_db.RegisterMessage(PatchModelRequest) - -DeleteModelRequest = _reflection.GeneratedProtocolMessageType( - "DeleteModelRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEMODELREQUEST, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - project_id: - Required. Project ID of the model to delete. - dataset_id: - Required. Dataset ID of the model to delete. - model_id: - Required. Model ID of the model to delete. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.DeleteModelRequest) - ), -) -_sym_db.RegisterMessage(DeleteModelRequest) - -ListModelsRequest = _reflection.GeneratedProtocolMessageType( - "ListModelsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMODELSREQUEST, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - project_id: - Required. Project ID of the models to list. - dataset_id: - Required. Dataset ID of the models to list. - max_results: - The maximum number of results to return in a single response - page. Leverage the page tokens to iterate through the entire - collection. - page_token: - Page token, returned by a previous call to request the next - page of results - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ListModelsRequest) - ), -) -_sym_db.RegisterMessage(ListModelsRequest) - -ListModelsResponse = _reflection.GeneratedProtocolMessageType( - "ListModelsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMODELSRESPONSE, - __module__="google.cloud.bigquery_v2.proto.model_pb2", - __doc__="""Protocol buffer. - - Attributes: - models: - Models in the requested dataset. Only the following fields are - populated: model\_reference, model\_type, creation\_time, - last\_modified\_time and labels. - next_page_token: - A token to request the next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ListModelsResponse) - ), -) -_sym_db.RegisterMessage(ListModelsResponse) - - -DESCRIPTOR._options = None -_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY._options = None -_MODEL_LABELSENTRY._options = None -_MODEL.fields_by_name["etag"]._options = None -_MODEL.fields_by_name["model_reference"]._options = None -_MODEL.fields_by_name["creation_time"]._options = None -_MODEL.fields_by_name["last_modified_time"]._options = None -_MODEL.fields_by_name["description"]._options = None -_MODEL.fields_by_name["friendly_name"]._options = None -_MODEL.fields_by_name["expiration_time"]._options = None -_MODEL.fields_by_name["location"]._options = None -_MODEL.fields_by_name["model_type"]._options = None -_MODEL.fields_by_name["training_runs"]._options = None -_MODEL.fields_by_name["feature_columns"]._options = None -_MODEL.fields_by_name["label_columns"]._options = None -_GETMODELREQUEST.fields_by_name["project_id"]._options = None -_GETMODELREQUEST.fields_by_name["dataset_id"]._options = None -_GETMODELREQUEST.fields_by_name["model_id"]._options = None -_PATCHMODELREQUEST.fields_by_name["project_id"]._options = None -_PATCHMODELREQUEST.fields_by_name["dataset_id"]._options = None -_PATCHMODELREQUEST.fields_by_name["model_id"]._options = None -_PATCHMODELREQUEST.fields_by_name["model"]._options = None -_DELETEMODELREQUEST.fields_by_name["project_id"]._options = None -_DELETEMODELREQUEST.fields_by_name["dataset_id"]._options = None -_DELETEMODELREQUEST.fields_by_name["model_id"]._options = None -_LISTMODELSREQUEST.fields_by_name["project_id"]._options = None -_LISTMODELSREQUEST.fields_by_name["dataset_id"]._options = None - -_MODELSERVICE = _descriptor.ServiceDescriptor( - name="ModelService", - full_name="google.cloud.bigquery.v2.ModelService", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\027bigquery.googleapis.com\322A\302\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only" - 
), - serialized_start=7804, - serialized_end=8566, - methods=[ - _descriptor.MethodDescriptor( - name="GetModel", - full_name="google.cloud.bigquery.v2.ModelService.GetModel", - index=0, - containing_service=None, - input_type=_GETMODELREQUEST, - output_type=_MODEL, - serialized_options=_b("\332A\036project_id,dataset_id,model_id"), - ), - _descriptor.MethodDescriptor( - name="ListModels", - full_name="google.cloud.bigquery.v2.ModelService.ListModels", - index=1, - containing_service=None, - input_type=_LISTMODELSREQUEST, - output_type=_LISTMODELSRESPONSE, - serialized_options=_b("\332A!project_id,dataset_id,max_results"), - ), - _descriptor.MethodDescriptor( - name="PatchModel", - full_name="google.cloud.bigquery.v2.ModelService.PatchModel", - index=2, - containing_service=None, - input_type=_PATCHMODELREQUEST, - output_type=_MODEL, - serialized_options=_b("\332A$project_id,dataset_id,model_id,model"), - ), - _descriptor.MethodDescriptor( - name="DeleteModel", - full_name="google.cloud.bigquery.v2.ModelService.DeleteModel", - index=3, - containing_service=None, - input_type=_DELETEMODELREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b("\332A\036project_id,dataset_id,model_id"), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_MODELSERVICE) - -DESCRIPTOR.services_by_name["ModelService"] = _MODELSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/model_pb2_grpc.py deleted file mode 100644 index 5abcdf0f2606..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/model_pb2_grpc.py +++ /dev/null @@ -1,102 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.bigquery_v2.proto import ( - model_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class ModelServiceStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.GetModel = channel.unary_unary( - "/google.cloud.bigquery.v2.ModelService/GetModel", - request_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.GetModelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.FromString, - ) - self.ListModels = channel.unary_unary( - "/google.cloud.bigquery.v2.ModelService/ListModels", - request_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsResponse.FromString, - ) - self.PatchModel = channel.unary_unary( - "/google.cloud.bigquery.v2.ModelService/PatchModel", - request_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.PatchModelRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.FromString, - ) - self.DeleteModel = channel.unary_unary( - "/google.cloud.bigquery.v2.ModelService/DeleteModel", - request_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.DeleteModelRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class ModelServiceServicer(object): - # missing associated documentation comment in .proto file - pass - - def GetModel(self, request, context): - """Gets the specified model resource by model ID. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListModels(self, request, context): - """Lists all models in the specified dataset. Requires the READER dataset - role. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PatchModel(self, request, context): - """Patch specific fields in the specified model. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteModel(self, request, context): - """Deletes the model specified by modelId from the dataset. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ModelServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "GetModel": grpc.unary_unary_rpc_method_handler( - servicer.GetModel, - request_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.GetModelRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.SerializeToString, - ), - "ListModels": grpc.unary_unary_rpc_method_handler( - servicer.ListModels, - request_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsResponse.SerializeToString, - ), - "PatchModel": grpc.unary_unary_rpc_method_handler( - servicer.PatchModel, - request_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.PatchModelRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.SerializeToString, - ), - "DeleteModel": grpc.unary_unary_rpc_method_handler( - servicer.DeleteModel, - request_deserializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.DeleteModelRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.v2.ModelService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto b/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto deleted file mode 100644 index fadd175146b0..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.v2; - -import "google/api/field_behavior.proto"; -import "google/api/annotations.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; -option java_outer_classname = "ModelReferenceProto"; -option java_package = "com.google.cloud.bigquery.v2"; - -// Id path of a model. -message ModelReference { - // Required. The ID of the project containing this model. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The ID of the dataset containing this model. - string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The ID of the model. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 1,024 characters. 
- string model_id = 3 [(google.api.field_behavior) = REQUIRED]; -} diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py deleted file mode 100644 index 01e6e29522a5..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery_v2/proto/model_reference.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_v2/proto/model_reference.proto", - package="google.cloud.bigquery.v2", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.bigquery.v2B\023ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" - ), - serialized_pb=_b( - '\n4google/cloud/bigquery_v2/proto/model_reference.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"Y\n\x0eModelReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42u\n\x1c\x63om.google.cloud.bigquery.v2B\x13ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_MODELREFERENCE = _descriptor.Descriptor( - name="ModelReference", - full_name="google.cloud.bigquery.v2.ModelReference", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.bigquery.v2.ModelReference.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.cloud.bigquery.v2.ModelReference.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="model_id", - full_name="google.cloud.bigquery.v2.ModelReference.model_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], 
-    serialized_options=None,
-    is_extendable=False,
-    syntax="proto3",
-    extension_ranges=[],
-    oneofs=[],
-    serialized_start=145,
-    serialized_end=234,
-)
-
-DESCRIPTOR.message_types_by_name["ModelReference"] = _MODELREFERENCE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-ModelReference = _reflection.GeneratedProtocolMessageType(
-    "ModelReference",
-    (_message.Message,),
-    dict(
-        DESCRIPTOR=_MODELREFERENCE,
-        __module__="google.cloud.bigquery_v2.proto.model_reference_pb2",
-        __doc__="""Id path of a model.
-
-
-  Attributes:
-      project_id:
-          Required. The ID of the project containing this model.
-      dataset_id:
-          Required. The ID of the dataset containing this model.
-      model_id:
-          Required. The ID of the model. The ID must contain only
-          letters (a-z, A-Z), numbers (0-9), or underscores (\_). The
-          maximum length is 1,024 characters.
-  """,
-        # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ModelReference)
-    ),
-)
-_sym_db.RegisterMessage(ModelReference)
-
-
-DESCRIPTOR._options = None
-_MODELREFERENCE.fields_by_name["project_id"]._options = None
-_MODELREFERENCE.fields_by_name["dataset_id"]._options = None
-_MODELREFERENCE.fields_by_name["model_id"]._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03a9..000000000000
--- a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto b/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto
deleted file mode 100644
index ff69dfc4eb30..000000000000
--- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.cloud.bigquery.v2;
-
-import "google/api/field_behavior.proto";
-import "google/api/annotations.proto";
-
-option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery";
-option java_outer_classname = "StandardSqlProto";
-option java_package = "com.google.cloud.bigquery.v2";
-
-// The type of a variable, e.g., a function argument.
-// Examples:
-// INT64: {type_kind="INT64"}
-// ARRAY<STRING>: {type_kind="ARRAY", array_element_type="STRING"}
-// STRUCT<x STRING, y ARRAY<DATE>>:
-//   {type_kind="STRUCT",
-//    struct_type={fields=[
-//      {name="x", type={type_kind="STRING"}},
-//      {name="y", type={type_kind="ARRAY", array_element_type="DATE"}}
-//    ]}}
-message StandardSqlDataType {
-  enum TypeKind {
-    // Invalid type.
-    TYPE_KIND_UNSPECIFIED = 0;
-
-    // Encoded as a string in decimal format.
-    INT64 = 2;
-
-    // Encoded as a boolean "false" or "true".
-    BOOL = 5;
-
-    // Encoded as a number, or string "NaN", "Infinity" or "-Infinity".
-    FLOAT64 = 7;
-
-    // Encoded as a string value.
-    STRING = 8;
-
-    // Encoded as a base64 string per RFC 4648, section 4.
-    BYTES = 9;
-
-    // Encoded as an RFC 3339 timestamp with mandatory "Z" time zone string:
-    // 1985-04-12T23:20:50.52Z
-    TIMESTAMP = 19;
-
-    // Encoded as RFC 3339 full-date format string: 1985-04-12
-    DATE = 10;
-
-    // Encoded as RFC 3339 partial-time format string: 23:20:50.52
-    TIME = 20;
-
-    // Encoded as RFC 3339 full-date "T" partial-time: 1985-04-12T23:20:50.52
-    DATETIME = 21;
-
-    // Encoded as WKT
-    GEOGRAPHY = 22;
-
-    // Encoded as a decimal string.
-    NUMERIC = 23;
-
-    // Encoded as a list with types matching Type.array_type.
-    ARRAY = 16;
-
-    // Encoded as a list with fields of type Type.struct_type[i]. List is used
-    // because a JSON object cannot have duplicate field names.
-    STRUCT = 17;
-  }
-
-  // Required. The top level type of this field.
-  // Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY<INT64>").
-  TypeKind type_kind = 1 [(google.api.field_behavior) = REQUIRED];
-
-  oneof sub_type {
-    // The type of the array's elements, if type_kind = "ARRAY".
-    StandardSqlDataType array_element_type = 2;
-
-    // The fields of this struct, in order, if type_kind = "STRUCT".
-    StandardSqlStructType struct_type = 3;
-  }
-}
-
-// A field or a column.
-message StandardSqlField {
-  // Optional. The name of this field. Can be absent for struct fields.
-  string name = 1 [(google.api.field_behavior) = OPTIONAL];
-
-  // Optional. The type of this parameter. Absent if not explicitly
-  // specified (e.g., CREATE FUNCTION statement can omit the return type;
-  // in this case the output parameter does not have this "type" field).
-  StandardSqlDataType type = 2 [(google.api.field_behavior) = OPTIONAL];
-}
-
-message StandardSqlStructType {
-  repeated StandardSqlField fields = 1;
-}
diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
deleted file mode 100644
index ca02014057d2..000000000000
--- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
+++ /dev/null
@@ -1,373 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/bigquery_v2/proto/standard_sql.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_v2/proto/standard_sql.proto", - package="google.cloud.bigquery.v2", - syntax="proto3", - serialized_options=_b( - "\n\034com.google.cloud.bigquery.v2B\020StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" - ), - serialized_pb=_b( - '\n1google/cloud/bigquery_v2/proto/standard_sql.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x03\n\x13StandardSqlDataType\x12N\n\ttype_kind\x18\x01 \x01(\x0e\x32\x36.google.cloud.bigquery.v2.StandardSqlDataType.TypeKindB\x03\xe0\x41\x02\x12K\n\x12\x61rray_element_type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeH\x00\x12\x46\n\x0bstruct_type\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.v2.StandardSqlStructTypeH\x00"\xc2\x01\n\x08TypeKind\x12\x19\n\x15TYPE_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05INT64\x10\x02\x12\x08\n\x04\x42OOL\x10\x05\x12\x0b\n\x07\x46LOAT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\t\n\x05\x42YTES\x10\t\x12\r\n\tTIMESTAMP\x10\x13\x12\x08\n\x04\x44\x41TE\x10\n\x12\x08\n\x04TIME\x10\x14\x12\x0c\n\x08\x44\x41TETIME\x10\x15\x12\r\n\tGEOGRAPHY\x10\x16\x12\x0b\n\x07NUMERIC\x10\x17\x12\t\n\x05\x41RRAY\x10\x10\x12\n\n\x06STRUCT\x10\x11\x42\n\n\x08sub_type"g\n\x10StandardSqlField\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12@\n\x04type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeB\x03\xe0\x41\x01"S\n\x15StandardSqlStructType\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldBr\n\x1c\x63om.google.cloud.bigquery.v2B\x10StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' - ), - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_STANDARDSQLDATATYPE_TYPEKIND = _descriptor.EnumDescriptor( - name="TypeKind", - full_name="google.cloud.bigquery.v2.StandardSqlDataType.TypeKind", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_KIND_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="INT64", index=1, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BOOL", index=2, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FLOAT64", index=3, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STRING", index=4, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BYTES", index=5, number=9, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIMESTAMP", index=6, number=19, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATE", 
index=7, number=10, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIME", index=8, number=20, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATETIME", index=9, number=21, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GEOGRAPHY", index=10, number=22, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NUMERIC", index=11, number=23, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ARRAY", index=12, number=16, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="STRUCT", index=13, number=17, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=396, - serialized_end=590, -) -_sym_db.RegisterEnumDescriptor(_STANDARDSQLDATATYPE_TYPEKIND) - - -_STANDARDSQLDATATYPE = _descriptor.Descriptor( - name="StandardSqlDataType", - full_name="google.cloud.bigquery.v2.StandardSqlDataType", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type_kind", - full_name="google.cloud.bigquery.v2.StandardSqlDataType.type_kind", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="array_element_type", - full_name="google.cloud.bigquery.v2.StandardSqlDataType.array_element_type", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="struct_type", - full_name="google.cloud.bigquery.v2.StandardSqlDataType.struct_type", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_STANDARDSQLDATATYPE_TYPEKIND,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="sub_type", - full_name="google.cloud.bigquery.v2.StandardSqlDataType.sub_type", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=143, - serialized_end=602, -) - - -_STANDARDSQLFIELD = _descriptor.Descriptor( - name="StandardSqlField", - full_name="google.cloud.bigquery.v2.StandardSqlField", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.v2.StandardSqlField.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.bigquery.v2.StandardSqlField.type", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None,
-            message_type=None,
-            enum_type=None,
-            containing_type=None,
-            is_extension=False,
-            extension_scope=None,
-            serialized_options=_b("\340A\001"),
-            file=DESCRIPTOR,
-        ),
-    ],
-    extensions=[],
-    nested_types=[],
-    enum_types=[],
-    serialized_options=None,
-    is_extendable=False,
-    syntax="proto3",
-    extension_ranges=[],
-    oneofs=[],
-    serialized_start=604,
-    serialized_end=707,
-)
-
-
-_STANDARDSQLSTRUCTTYPE = _descriptor.Descriptor(
-    name="StandardSqlStructType",
-    full_name="google.cloud.bigquery.v2.StandardSqlStructType",
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    fields=[
-        _descriptor.FieldDescriptor(
-            name="fields",
-            full_name="google.cloud.bigquery.v2.StandardSqlStructType.fields",
-            index=0,
-            number=1,
-            type=11,
-            cpp_type=10,
-            label=3,
-            has_default_value=False,
-            default_value=[],
-            message_type=None,
-            enum_type=None,
-            containing_type=None,
-            is_extension=False,
-            extension_scope=None,
-            serialized_options=None,
-            file=DESCRIPTOR,
-        ),
-    ],
-    extensions=[],
-    nested_types=[],
-    enum_types=[],
-    serialized_options=None,
-    is_extendable=False,
-    syntax="proto3",
-    extension_ranges=[],
-    oneofs=[],
-    serialized_start=709,
-    serialized_end=792,
-)
-
-_STANDARDSQLDATATYPE.fields_by_name[
-    "type_kind"
-].enum_type = _STANDARDSQLDATATYPE_TYPEKIND
-_STANDARDSQLDATATYPE.fields_by_name[
-    "array_element_type"
-].message_type = _STANDARDSQLDATATYPE
-_STANDARDSQLDATATYPE.fields_by_name["struct_type"].message_type = _STANDARDSQLSTRUCTTYPE
-_STANDARDSQLDATATYPE_TYPEKIND.containing_type = _STANDARDSQLDATATYPE
-_STANDARDSQLDATATYPE.oneofs_by_name["sub_type"].fields.append(
-    _STANDARDSQLDATATYPE.fields_by_name["array_element_type"]
-)
-_STANDARDSQLDATATYPE.fields_by_name[
-    "array_element_type"
-].containing_oneof = _STANDARDSQLDATATYPE.oneofs_by_name["sub_type"]
-_STANDARDSQLDATATYPE.oneofs_by_name["sub_type"].fields.append(
-    _STANDARDSQLDATATYPE.fields_by_name["struct_type"]
-)
-_STANDARDSQLDATATYPE.fields_by_name[
-    "struct_type"
-].containing_oneof = _STANDARDSQLDATATYPE.oneofs_by_name["sub_type"]
-_STANDARDSQLFIELD.fields_by_name["type"].message_type = _STANDARDSQLDATATYPE
-_STANDARDSQLSTRUCTTYPE.fields_by_name["fields"].message_type = _STANDARDSQLFIELD
-DESCRIPTOR.message_types_by_name["StandardSqlDataType"] = _STANDARDSQLDATATYPE
-DESCRIPTOR.message_types_by_name["StandardSqlField"] = _STANDARDSQLFIELD
-DESCRIPTOR.message_types_by_name["StandardSqlStructType"] = _STANDARDSQLSTRUCTTYPE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-StandardSqlDataType = _reflection.GeneratedProtocolMessageType(
-    "StandardSqlDataType",
-    (_message.Message,),
-    dict(
-        DESCRIPTOR=_STANDARDSQLDATATYPE,
-        __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2",
-        __doc__="""The type of a variable, e.g., a function argument.
-  Examples: INT64: {type\_kind="INT64"} ARRAY<STRING>: {type\_kind="ARRAY",
-  array\_element\_type="STRING"} STRUCT<x STRING, y ARRAY<DATE>>:
-  {type\_kind="STRUCT", struct\_type={fields=[ {name="x",
-  type={type\_kind="STRING"}}, {name="y", type={type\_kind="ARRAY",
-  array\_element\_type="DATE"}} ]}}
-
-
-  Attributes:
-      type_kind:
-          Required. The top level type of this field. Can be any
-          standard SQL data type (e.g., "INT64", "DATE", "ARRAY<INT64>").
-      array_element_type:
-          The type of the array's elements, if type\_kind = "ARRAY".
-      struct_type:
-          The fields of this struct, in order, if type\_kind = "STRUCT".
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlDataType) - ), -) -_sym_db.RegisterMessage(StandardSqlDataType) - -StandardSqlField = _reflection.GeneratedProtocolMessageType( - "StandardSqlField", - (_message.Message,), - dict( - DESCRIPTOR=_STANDARDSQLFIELD, - __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2", - __doc__="""A field or a column. - - - Attributes: - name: - Optional. The name of this field. Can be absent for struct - fields. - type: - Optional. The type of this parameter. Absent if not explicitly - specified (e.g., CREATE FUNCTION statement can omit the return - type; in this case the output parameter does not have this - "type" field). - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlField) - ), -) -_sym_db.RegisterMessage(StandardSqlField) - -StandardSqlStructType = _reflection.GeneratedProtocolMessageType( - "StandardSqlStructType", - (_message.Message,), - dict( - DESCRIPTOR=_STANDARDSQLSTRUCTTYPE, - __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlStructType) - ), -) -_sym_db.RegisterMessage(StandardSqlStructType) - - -DESCRIPTOR._options = None -_STANDARDSQLDATATYPE.fields_by_name["type_kind"]._options = None -_STANDARDSQLFIELD.fields_by_name["name"]._options = None -_STANDARDSQLFIELD.fields_by_name["type"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/bigquery/google/cloud/bigquery_v2/types.py b/bigquery/google/cloud/bigquery_v2/types.py deleted file mode 100644 index 7d4f9b7326e4..000000000000 --- a/bigquery/google/cloud/bigquery_v2/types.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.bigquery_v2.proto import encryption_config_pb2 -from google.cloud.bigquery_v2.proto import model_pb2 -from google.cloud.bigquery_v2.proto import model_reference_pb2 -from google.cloud.bigquery_v2.proto import standard_sql_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import wrappers_pb2 - - -_shared_modules = [ - empty_pb2, - timestamp_pb2, - wrappers_pb2, -] - -_local_modules = [ - encryption_config_pb2, - model_pb2, - model_reference_pb2, - standard_sql_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.bigquery_v2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/bigquery/noxfile.py b/bigquery/noxfile.py deleted file mode 100644 index 17a2dee417c0..000000000000 --- a/bigquery/noxfile.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core[grpc]"), os.path.join("..", "core")) - -BLACK_PATHS = ("docs", "google", "samples", "tests", "noxfile.py", "setup.py") - - -def default(session): - """Default unit test session. - - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. - """ - # Install all test dependencies, then install local packages in-place. - session.install("mock", "pytest", "pytest-cov", "freezegun") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - - session.install("-e", os.path.join("..", "test_utils")) - - coverage_fail_under = "--cov-fail-under=97" - - # fastparquet is not included in .[all] because, in general, it's redundant - # with pyarrow. We still want to run some unit tests with fastparquet - # serialization, though. - dev_install = ".[all,fastparquet]" - - # There is no pyarrow or fastparquet wheel for Python 3.8. - if session.python == "3.8": - # Since many tests are skipped due to missing dependencies, test - # coverage is much lower in Python 3.8. Remove once we can test with - # pyarrow. - coverage_fail_under = "--cov-fail-under=92" - dev_install = ".[pandas,tqdm]" - - session.install("-e", dev_install) - - # IPython does not support Python 2 after version 5.x - if session.python == "2.7": - session.install("ipython==5.5") - else: - session.install("ipython") - - # Run py.test against the unit tests. 
- session.run( - "py.test", - "--quiet", - "--cov=google.cloud.bigquery", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - coverage_fail_under, - os.path.join("tests", "unit"), - *session.posargs - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable.") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install local packages in place. - session.install("mock", "pytest", "psutil") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", os.path.join("..", "storage")) - session.install("-e", os.path.join("..", "test_utils")) - session.install("-e", ".[all]") - - # IPython does not support Python 2 after version 5.x - if session.python == "2.7": - session.install("ipython==5.5") - else: - session.install("ipython") - - # Run py.test against the system tests. - session.run( - "py.test", "--quiet", os.path.join("tests", "system.py"), *session.posargs - ) - - -@nox.session(python=["2.7", "3.7"]) -def snippets(session): - """Run the snippets test suite.""" - - # Sanity check: Only run snippets tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable.") - - # Install all test dependencies, then install local packages in place. - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", os.path.join("..", "storage")) - session.install("-e", os.path.join("..", "test_utils")) - session.install("-e", ".[all]") - - # Run py.test against the snippets tests. - session.run("py.test", os.path.join("docs", "snippets.py"), *session.posargs) - session.run("py.test", "samples", *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - session.run("coverage", "erase") - - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - - session.install("black", "flake8") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - session.run("flake8", os.path.join("google", "cloud", "bigquery")) - session.run("flake8", "tests") - session.run("flake8", os.path.join("docs", "samples")) - session.run("flake8", os.path.join("docs", "snippets.py")) - session.run("black", "--check", *BLACK_PATHS) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install("docutils", "Pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - Format code to uniform standard. 
- - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ - session.install("black") - session.run("black", *BLACK_PATHS) - - -@nox.session(python="3.7") -def docs(session): - """Build the docs.""" - - session.install("ipython", "recommonmark", "sphinx", "sphinx_rtd_theme") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", os.path.join("..", "storage")) - session.install("-e", ".[all]") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/bigquery/pylint.config.py b/bigquery/pylint.config.py deleted file mode 100644 index 5d64b9d2f256..000000000000 --- a/bigquery/pylint.config.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This module is used to configure gcp-devrel-py-tools run-pylint.""" - -# Library configuration - -# library_additions = {} -# library_replacements = {} - -# Test configuration - -# test_additions = copy.deepcopy(library_additions) -# test_replacements = copy.deepcopy(library_replacements) diff --git a/bigquery/samples/__init__.py b/bigquery/samples/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py deleted file mode 100644 index cd7cf5018e1f..000000000000 --- a/bigquery/samples/add_empty_column.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def add_empty_column(table_id): - - # [START bigquery_add_empty_column] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table - # to add an empty column. - # table_id = "your-project.your_dataset.your_table_name" - - table = client.get_table(table_id) # Make an API request. - - original_schema = table.schema - new_schema = original_schema[:] # Creates a copy of the schema. 
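-    # Mutating only the copy keeps ``original_schema`` unchanged for the
-    # length comparison after the update below.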
- new_schema.append(bigquery.SchemaField("phone", "STRING")) - - table.schema = new_schema - table = client.update_table(table, ["schema"]) # Make an API request. - - if len(table.schema) == len(original_schema) + 1 == len(new_schema): - print("A new column has been added.") - else: - print("The column has not been added.") - # [END bigquery_add_empty_column] diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py deleted file mode 100644 index 29a1c2ff61e0..000000000000 --- a/bigquery/samples/browse_table_data.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def browse_table_data(table_id): - - # [START bigquery_browse_table] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to browse data rows. - # table_id = "your-project.your_dataset.your_table_name" - - # Download all rows from a table. - rows_iter = client.list_rows(table_id) # Make an API request. - - # Iterate over rows to make the API requests to fetch row data. - rows = list(rows_iter) - print("Downloaded {} rows from table {}".format(len(rows), table_id)) - - # Download at most 10 rows. - rows_iter = client.list_rows(table_id, max_results=10) - rows = list(rows_iter) - print("Downloaded {} rows from table {}".format(len(rows), table_id)) - - # Specify selected fields to limit the results to certain columns. - table = client.get_table(table_id) # Make an API request. - fields = table.schema[:2] # First two columns. - rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10) - rows = list(rows_iter) - print("Selected {} columns from table {}.".format(len(rows_iter.schema), table_id)) - print("Downloaded {} rows from table {}".format(len(rows), table_id)) - - # Print row data in tabular format. - rows = client.list_rows(table, max_results=10) - format_string = "{!s:<16} " * len(rows.schema) - field_names = [field.name for field in rows.schema] - print(format_string.format(*field_names)) # Prints column headers. - for row in rows: - print(format_string.format(*row)) # Prints row data. - # [END bigquery_browse_table] diff --git a/bigquery/samples/client_list_jobs.py b/bigquery/samples/client_list_jobs.py deleted file mode 100644 index b2344e23c7f7..000000000000 --- a/bigquery/samples/client_list_jobs.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_list_jobs(): - - # [START bigquery_list_jobs] - - from google.cloud import bigquery - - import datetime - - # Construct a BigQuery client object. - client = bigquery.Client() - - # List the 10 most recent jobs in reverse chronological order. - # Omit the max_results parameter to list jobs from the past 6 months. - print("Last 10 jobs:") - for job in client.list_jobs(max_results=10): # API request(s) - print("{}".format(job.job_id)) - - # The following are examples of additional optional parameters: - - # Use min_creation_time and/or max_creation_time to specify a time window. - print("Jobs from the last ten minutes:") - ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) - for job in client.list_jobs(min_creation_time=ten_mins_ago): - print("{}".format(job.job_id)) - - # Use all_users to include jobs run by all users in the project. - print("Last 10 jobs run by all users:") - for job in client.list_jobs(max_results=10, all_users=True): - print("{} run by user: {}".format(job.job_id, job.user_email)) - - # Use state_filter to filter by job state. - print("Last 10 jobs done:") - for job in client.list_jobs(max_results=10, state_filter="DONE"): - print("{}".format(job.job_id)) - # [END bigquery_list_jobs] diff --git a/bigquery/samples/client_load_partitioned_table.py b/bigquery/samples/client_load_partitioned_table.py deleted file mode 100644 index e4e8a296c9a3..000000000000 --- a/bigquery/samples/client_load_partitioned_table.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_load_partitioned_table(table_id): - - # [START bigquery_load_table_partitioned] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig( - schema=[ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - bigquery.SchemaField("date", "DATE"), - ], - skip_leading_rows=1, - time_partitioning=bigquery.TimePartitioning( - type_=bigquery.TimePartitioningType.DAY, - field="date", # Name of the column to use for partitioning. - expiration_ms=7776000000, # 90 days. - ), - ) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv" - - load_job = client.load_table_from_uri( - uri, table_id, job_config=job_config - ) # Make an API request. - - load_job.result() # Wait for the job to complete. 
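-    # The destination table now exists, partitioned by day on the "date"
-    # column; re-fetch it to report how many rows were loaded.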
- - table = client.get_table(table_id) - print("Loaded {} rows to table {}".format(table.num_rows, table_id)) - # [END bigquery_load_table_partitioned] diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py deleted file mode 100644 index 7fedc3f90b1e..000000000000 --- a/bigquery/samples/client_query.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query(): - - # [START bigquery_query] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = """ - SELECT name, SUM(number) as total_people - FROM `bigquery-public-data.usa_names.usa_1910_2013` - WHERE state = 'TX' - GROUP BY name, state - ORDER BY total_people DESC - LIMIT 20 - """ - query_job = client.query(query) # Make an API request. - - print("The query data:") - for row in query_job: - # Row values can be accessed by field name or index. - print("name={}, count={}".format(row[0], row["total_people"])) - # [END bigquery_query] diff --git a/bigquery/samples/client_query_add_column.py b/bigquery/samples/client_query_add_column.py deleted file mode 100644 index ff7d5aa68add..000000000000 --- a/bigquery/samples/client_query_add_column.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_add_column(table_id): - - # [START bigquery_add_column_query_append] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the destination table. - # table_id = "your-project.your_dataset.your_table_name" - - # Retrieves the destination table and checks the length of the schema. - table = client.get_table(table_id) # Make an API request. - print("Table {} contains {} columns".format(table_id, len(table.schema))) - - # Configures the query to append the results to a destination table, - # allowing field addition. - job_config = bigquery.QueryJobConfig( - destination=table_id, - schema_update_options=[bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION], - write_disposition=bigquery.WriteDisposition.WRITE_APPEND, - ) - - # Start the query, passing in the extra configuration. - query_job = client.query( - # In this example, the existing table contains only the 'full_name' and - # 'age' columns, while the results of this query will contain an - # additional 'favorite_color' column. 
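-        # ALLOW_FIELD_ADDITION in job_config is what permits the appended
-        # results to introduce the new column.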
- 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', - job_config=job_config, - ) # Make an API request. - query_job.result() # Wait for the job to complete. - - # Checks the updated length of the schema. - table = client.get_table(table_id) # Make an API request. - print("Table {} now contains {} columns".format(table_id, len(table.schema))) - # [END bigquery_add_column_query_append] diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py deleted file mode 100644 index e1680f4a18d2..000000000000 --- a/bigquery/samples/client_query_batch.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_batch(): - - # [START bigquery_query_batch] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - job_config = bigquery.QueryJobConfig( - # Run at batch priority, which won't count toward concurrent rate limit. - priority=bigquery.QueryPriority.BATCH - ) - - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - - # Start the query, passing in the extra configuration. - query_job = client.query(sql, job_config=job_config) # Make an API request. - - # Check on the progress by getting the job's updated state. Once the state - # is `DONE`, the results are ready. - query_job = client.get_job( - query_job.job_id, location=query_job.location - ) # Make an API request. - - print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) - # [END bigquery_query_batch] - return query_job diff --git a/bigquery/samples/client_query_destination_table.py b/bigquery/samples/client_query_destination_table.py deleted file mode 100644 index 303ce5a0cc36..000000000000 --- a/bigquery/samples/client_query_destination_table.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_destination_table(table_id): - - # [START bigquery_query_destination_table] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the destination table. 
- # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.QueryJobConfig(destination=table_id) - - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - - # Start the query, passing in the extra configuration. - query_job = client.query(sql, job_config=job_config) # Make an API request. - query_job.result() # Wait for the job to complete. - - print("Query results loaded to the table {}".format(table_id)) - # [END bigquery_query_destination_table] diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py deleted file mode 100644 index 24d4f22228c7..000000000000 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_destination_table_cmek(table_id, kms_key_name): - - # [START bigquery_query_destination_table_cmek] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the destination table. - # table_id = "your-project.your_dataset.your_table_name" - - # Set the encryption key to use for the destination. - # TODO(developer): Replace this key with a key you have created in KMS. - # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - # your-project, location, your-ring, your-key - # ) - - job_config = bigquery.QueryJobConfig( - destination=table_id, - destination_encryption_configuration=bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name - ), - ) - - # Start the query, passing in the extra configuration. - query_job = client.query( - "SELECT 17 AS my_col;", job_config=job_config - ) # Make an API request. - query_job.result() # Wait for the job to complete. - - table = client.get_table(table_id) # Make an API request. - if table.encryption_configuration.kms_key_name == kms_key_name: - print("The destination table is written using the encryption configuration") - # [END bigquery_query_destination_table_cmek] diff --git a/bigquery/samples/client_query_destination_table_legacy.py b/bigquery/samples/client_query_destination_table_legacy.py deleted file mode 100644 index c8fdd606f9f4..000000000000 --- a/bigquery/samples/client_query_destination_table_legacy.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -def client_query_destination_table_legacy(table_id): - - # [START bigquery_query_legacy_large_results] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the destination table. - # table_id = "your-project.your_dataset.your_table_name" - - # Set the destination table and use_legacy_sql to True to use - # legacy SQL syntax. - job_config = bigquery.QueryJobConfig( - allow_large_results=True, destination=table_id, use_legacy_sql=True - ) - - sql = """ - SELECT corpus - FROM [bigquery-public-data:samples.shakespeare] - GROUP BY corpus; - """ - - # Start the query, passing in the extra configuration. - query_job = client.query(sql, job_config=job_config) # Make an API request. - query_job.result() # Wait for the job to complete. - - print("Query results loaded to the table {}".format(table_id)) - # [END bigquery_query_legacy_large_results] diff --git a/bigquery/samples/client_query_dry_run.py b/bigquery/samples/client_query_dry_run.py deleted file mode 100644 index 1f7bd0c9c4e7..000000000000 --- a/bigquery/samples/client_query_dry_run.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_dry_run(): - - # [START bigquery_query_dry_run] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - job_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False) - - # Start the query, passing in the extra configuration. - query_job = client.query( - ( - "SELECT name, COUNT(*) as name_count " - "FROM `bigquery-public-data.usa_names.usa_1910_2013` " - "WHERE state = 'WA' " - "GROUP BY name" - ), - job_config=job_config, - ) # Make an API request. - - # A dry run query completes immediately. - print("This query will process {} bytes.".format(query_job.total_bytes_processed)) - # [END bigquery_query_dry_run] - return query_job diff --git a/bigquery/samples/client_query_legacy_sql.py b/bigquery/samples/client_query_legacy_sql.py deleted file mode 100644 index 3f94657795fb..000000000000 --- a/bigquery/samples/client_query_legacy_sql.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_legacy_sql(): - - # [START bigquery_query_legacy] - from google.cloud import bigquery - - # Construct a BigQuery client object. 
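Because a dry run reports total_bytes_processed without executing the query, it can double as a rough cost estimator. A hedged sketch, assuming the `query_job` returned by the dry-run sample above and the historical $5.00-per-TiB on-demand rate (verify current pricing before relying on this):

tib = 2 ** 40
estimated_cost = query_job.total_bytes_processed / float(tib) * 5.00  # Assumed rate.
print("Estimated on-demand cost: ${:.6f}".format(estimated_cost))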
- client = bigquery.Client() - - query = ( - "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] " - 'WHERE state = "TX" ' - "LIMIT 100" - ) - - # Set use_legacy_sql to True to use legacy SQL syntax. - job_config = bigquery.QueryJobConfig(use_legacy_sql=True) - - # Start the query, passing in the extra configuration. - query_job = client.query(query, job_config=job_config) # Make an API request. - - print("The query data:") - for row in query_job: - print(row) - # [END bigquery_query_legacy] diff --git a/bigquery/samples/client_query_relax_column.py b/bigquery/samples/client_query_relax_column.py deleted file mode 100644 index 5e2ec8056a00..000000000000 --- a/bigquery/samples/client_query_relax_column.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_relax_column(table_id): - - # [START bigquery_relax_column_query_append] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the destination table. - # table_id = "your-project.your_dataset.your_table_name" - - # Retrieves the destination table and checks the number of required fields. - table = client.get_table(table_id) # Make an API request. - original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - - # In this example, the existing table has 2 required fields. - print("{} fields in the schema are required.".format(original_required_fields)) - - # Configures the query to append the results to a destination table, - # allowing field relaxation. - job_config = bigquery.QueryJobConfig( - destination=table_id, - schema_update_options=[bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION], - write_disposition=bigquery.WriteDisposition.WRITE_APPEND, - ) - - # Start the query, passing in the extra configuration. - query_job = client.query( - # In this example, the existing table contains 'full_name' and 'age' as - # required columns, but the query results will omit the second column. - 'SELECT "Beyonce" as full_name;', - job_config=job_config, - ) # Make an API request. - query_job.result() # Wait for the job to complete. - - # Checks the updated number of required fields. - table = client.get_table(table_id) # Make an API request. - current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - print("{} fields in the schema are now required.".format(current_required_fields)) - # [END bigquery_relax_column_query_append] diff --git a/bigquery/samples/client_query_w_array_params.py b/bigquery/samples/client_query_w_array_params.py deleted file mode 100644 index 4077be2c7d3e..000000000000 --- a/bigquery/samples/client_query_w_array_params.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
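The field-relaxation sample above only counts REQUIRED fields; listing each field's mode makes the schema change visible. A minimal sketch, assuming the `table` object fetched at the end of that sample:

for field in table.schema:
    # After relaxation, formerly REQUIRED fields report NULLABLE.
    print("{}: {}".format(field.name, field.mode))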
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_w_array_params(): - - # [START bigquery_query_params_arrays] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = """ - SELECT name, sum(number) as count - FROM `bigquery-public-data.usa_names.usa_1910_2013` - WHERE gender = @gender - AND state IN UNNEST(@states) - GROUP BY name - ORDER BY count DESC - LIMIT 10; - """ - job_config = bigquery.QueryJobConfig( - query_parameters=[ - bigquery.ScalarQueryParameter("gender", "STRING", "M"), - bigquery.ArrayQueryParameter("states", "STRING", ["WA", "WI", "WV", "WY"]), - ] - ) - query_job = client.query(query, job_config=job_config) # Make an API request. - - for row in query_job: - print("{}: \t{}".format(row.name, row.count)) - # [END bigquery_query_params_arrays] diff --git a/bigquery/samples/client_query_w_named_params.py b/bigquery/samples/client_query_w_named_params.py deleted file mode 100644 index a0de8f63aa99..000000000000 --- a/bigquery/samples/client_query_w_named_params.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_w_named_params(): - - # [START bigquery_query_params_named] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = """ - SELECT word, word_count - FROM `bigquery-public-data.samples.shakespeare` - WHERE corpus = @corpus - AND word_count >= @min_word_count - ORDER BY word_count DESC; - """ - job_config = bigquery.QueryJobConfig( - query_parameters=[ - bigquery.ScalarQueryParameter("corpus", "STRING", "romeoandjuliet"), - bigquery.ScalarQueryParameter("min_word_count", "INT64", 250), - ] - ) - query_job = client.query(query, job_config=job_config) # Make an API request. - - for row in query_job: - print("{}: \t{}".format(row.word, row.word_count)) - # [END bigquery_query_params_named] diff --git a/bigquery/samples/client_query_w_positional_params.py b/bigquery/samples/client_query_w_positional_params.py deleted file mode 100644 index ee316044bda3..000000000000 --- a/bigquery/samples/client_query_w_positional_params.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
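Named parameters keep the SQL text fixed while only the values vary, so the same query is easy to wrap in a reusable function. A hedged sketch built on the named-parameter sample above (the function name is illustrative):

from google.cloud import bigquery

def run_word_count_query(client, query, corpus, min_word_count):
    # Only the parameter list changes between runs; the SQL string is reused as-is.
    job_config = bigquery.QueryJobConfig(
        query_parameters=[
            bigquery.ScalarQueryParameter("corpus", "STRING", corpus),
            bigquery.ScalarQueryParameter("min_word_count", "INT64", min_word_count),
        ]
    )
    return client.query(query, job_config=job_config)  # Make an API request.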
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_w_positional_params(): - - # [START bigquery_query_params_positional] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = """ - SELECT word, word_count - FROM `bigquery-public-data.samples.shakespeare` - WHERE corpus = ? - AND word_count >= ? - ORDER BY word_count DESC; - """ - # Set the name to None to use positional parameters. - # Note that you cannot mix named and positional parameters. - job_config = bigquery.QueryJobConfig( - query_parameters=[ - bigquery.ScalarQueryParameter(None, "STRING", "romeoandjuliet"), - bigquery.ScalarQueryParameter(None, "INT64", 250), - ] - ) - query_job = client.query(query, job_config=job_config) # Make an API request. - - for row in query_job: - print("{}: \t{}".format(row.word, row.word_count)) - # [END bigquery_query_params_positional] diff --git a/bigquery/samples/client_query_w_struct_params.py b/bigquery/samples/client_query_w_struct_params.py deleted file mode 100644 index 041a3a0e3839..000000000000 --- a/bigquery/samples/client_query_w_struct_params.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_w_struct_params(): - - # [START bigquery_query_params_structs] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = "SELECT @struct_value AS s;" - job_config = bigquery.QueryJobConfig( - query_parameters=[ - bigquery.StructQueryParameter( - "struct_value", - bigquery.ScalarQueryParameter("x", "INT64", 1), - bigquery.ScalarQueryParameter("y", "STRING", "foo"), - ) - ] - ) - query_job = client.query(query, job_config=job_config) # Make an API request. - - for row in query_job: - print(row.s) - # [END bigquery_query_params_structs] diff --git a/bigquery/samples/client_query_w_timestamp_params.py b/bigquery/samples/client_query_w_timestamp_params.py deleted file mode 100644 index ca8eec0b5e89..000000000000 --- a/bigquery/samples/client_query_w_timestamp_params.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
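In the struct-parameter sample above, each row's `s` value comes back as a Python dict keyed by the struct's field names. A minimal illustration of what the loop yields, assuming the sample's parameters:

for row in query_job:
    print(row.s)  # {'x': 1, 'y': 'foo'}
    print(row.s["x"], row.s["y"])  # Struct fields are addressable like dict keys.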
-# See the License for the specific language governing permissions and -# limitations under the License. - - -def client_query_w_timestamp_params(): - - # [START bigquery_query_params_timestamps] - import datetime - - import pytz - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);" - job_config = bigquery.QueryJobConfig( - query_parameters=[ - bigquery.ScalarQueryParameter( - "ts_value", - "TIMESTAMP", - datetime.datetime(2016, 12, 7, 8, 0, tzinfo=pytz.UTC), - ) - ] - ) - query_job = client.query(query, job_config=job_config) # Make an API request. - - for row in query_job: - print(row) - # [END bigquery_query_params_timestamps] diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py deleted file mode 100644 index 91c58e109cb9..000000000000 --- a/bigquery/samples/copy_table.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def copy_table(source_table_id, destination_table_id): - - # [START bigquery_copy_table] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set source_table_id to the ID of the original table. - # source_table_id = "your-project.source_dataset.source_table" - - # TODO(developer): Set destination_table_id to the ID of the destination table. - # destination_table_id = "your-project.destination_dataset.destination_table" - - job = client.copy_table(source_table_id, destination_table_id) - job.result() # Wait for the job to complete. - - print("A copy of the table has been created.") - # [END bigquery_copy_table] diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py deleted file mode 100644 index 52ccb5f7b1df..000000000000 --- a/bigquery/samples/copy_table_cmek.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def copy_table_cmek(dest_table_id, orig_table_id, kms_key_name): - - # [START bigquery_copy_table_cmek] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dest_table_id to the ID of the destination table. - # dest_table_id = "your-project.your_dataset.your_table_name" - - # TODO(developer): Set orig_table_id to the ID of the original table.
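A copy job's default write disposition (WRITE_EMPTY) makes the job fail if the destination table already contains data. A hedged variant of the copy_table sample above that overwrites the destination instead, assuming the same `client` and table IDs:

job_config = bigquery.CopyJobConfig(
    write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE  # Overwrite.
)
job = client.copy_table(source_table_id, destination_table_id, job_config=job_config)
job.result()  # Wait for the job to complete.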
- # orig_table_id = "your-project.your_dataset.your_table_name" - - # Set the encryption key to use for the destination. - # TODO(developer): Replace this key with a key you have created in KMS. - # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - # your-project, location, your-ring, your-key - # ) - - job_config = bigquery.CopyJobConfig( - destination_encryption_configuration=bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name - ) - ) - job = client.copy_table(orig_table_id, dest_table_id, job_config=job_config) - job.result() # Wait for the job to complete. - - dest_table = client.get_table(dest_table_id) # Make an API request. - if dest_table.encryption_configuration.kms_key_name == kms_key_name: - print("A copy of the table created") - # [END bigquery_copy_table_cmek] diff --git a/bigquery/samples/copy_table_multiple_source.py b/bigquery/samples/copy_table_multiple_source.py deleted file mode 100644 index d86e380d0682..000000000000 --- a/bigquery/samples/copy_table_multiple_source.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def copy_table_multiple_source(dest_table_id, table_ids): - - # [START bigquery_copy_table_multiple_source] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dest_table_id to the ID of the destination table. - # dest_table_id = "your-project.your_dataset.your_table_name" - - # TODO(developer): Set table_ids to the list of the IDs of the original tables. - # table_ids = ["your-project.your_dataset.your_table_name", ...] - - job = client.copy_table(table_ids, dest_table_id) # Make an API request. - job.result() # Wait for the job to complete. - - print("The tables {} have been appended to {}".format(table_ids, dest_table_id)) - # [END bigquery_copy_table_multiple_source] diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py deleted file mode 100644 index e47d68a96b2a..000000000000 --- a/bigquery/samples/create_dataset.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_dataset(dataset_id): - - # [START bigquery_create_dataset] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to create. 
- # dataset_id = "{}.your_dataset".format(client.project) - - # Construct a full Dataset object to send to the API. - dataset = bigquery.Dataset(dataset_id) - - # TODO(developer): Specify the geographic location where the dataset should reside. - dataset.location = "US" - - # Send the dataset to the API for creation. - # Raises google.api_core.exceptions.Conflict if the Dataset already - # exists within the project. - dataset = client.create_dataset(dataset) # Make an API request. - print("Created dataset {}.{}".format(client.project, dataset.dataset_id)) - # [END bigquery_create_dataset] diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py deleted file mode 100644 index feed04ca00e8..000000000000 --- a/bigquery/samples/create_job.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_job(): - - # [START bigquery_create_job] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query_job = client.query( - "SELECT country_name from `bigquery-public-data.utility_us.country_code_iso`", - # Explicitly force job execution to be routed to a specific processing - # location. - location="US", - # Specify a job configuration to set optional job resource properties. - job_config=bigquery.QueryJobConfig( - labels={"example-label": "example-value"}, maximum_bytes_billed=1000000 - ), - # The client libraries automatically generate a job ID. Override the - # generated ID with either the job_id_prefix or job_id parameters. - job_id_prefix="code_sample_", - ) # Make an API request. - - print("Started job: {}".format(query_job.job_id)) - # [END bigquery_create_job] - return query_job diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py deleted file mode 100644 index d9b221a4f62b..000000000000 --- a/bigquery/samples/create_routine.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_routine(routine_id): - - # [START bigquery_create_routine] - from google.cloud import bigquery - from google.cloud import bigquery_v2 - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Choose a fully-qualified ID for the routine. 
- # routine_id = "my-project.my_dataset.my_routine" - - routine = bigquery.Routine( - routine_id, - type_="SCALAR_FUNCTION", - language="SQL", - body="x * 3", - arguments=[ - bigquery.RoutineArgument( - name="x", - data_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ), - ) - ], - ) - - routine = client.create_routine(routine) # Make an API request. - - print("Created routine {}".format(routine.reference)) - # [END bigquery_create_routine] - return routine diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py deleted file mode 100644 index c191bd385041..000000000000 --- a/bigquery/samples/create_routine_ddl.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_routine_ddl(routine_id): - - # [START bigquery_create_routine_ddl] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Choose a fully-qualified ID for the routine. - # routine_id = "my-project.my_dataset.my_routine" - - sql = """ - CREATE FUNCTION `{}`( - arr ARRAY> - ) AS ( - (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem) - ) - """.format( - routine_id - ) - query_job = client.query(sql) # Make an API request. - query_job.result() # Wait for the job to complete. - - print("Created routine {}".format(query_job.ddl_target_routine)) - # [END bigquery_create_routine_ddl] diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py deleted file mode 100644 index d62e86681afc..000000000000 --- a/bigquery/samples/create_table.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_table(table_id): - - # [START bigquery_create_table] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - table = bigquery.Table(table_id, schema=schema) - table = client.create_table(table) # Make an API request. 
- print( - "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) - ) - # [END bigquery_create_table] diff --git a/bigquery/samples/create_table_range_partitioned.py b/bigquery/samples/create_table_range_partitioned.py deleted file mode 100644 index 260041aa5d5d..000000000000 --- a/bigquery/samples/create_table_range_partitioned.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_table_range_partitioned(table_id): - - # [START bigquery_create_table_range_partitioned] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - schema = [ - bigquery.SchemaField("full_name", "STRING"), - bigquery.SchemaField("city", "STRING"), - bigquery.SchemaField("zipcode", "INTEGER"), - ] - - table = bigquery.Table(table_id, schema=schema) - table.range_partitioning = bigquery.RangePartitioning( - # To use integer range partitioning, select a top-level REQUIRED / - # NULLABLE column with INTEGER / INT64 data type. - field="zipcode", - range_=bigquery.PartitionRange(start=0, end=100000, interval=10), - ) - table = client.create_table(table) # Make an API request. - print( - "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) - ) - # [END bigquery_create_table_range_partitioned] - return table diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py deleted file mode 100644 index b4db9353b37f..000000000000 --- a/bigquery/samples/dataset_exists.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def dataset_exists(dataset_id): - - # [START bigquery_dataset_exists] - from google.cloud import bigquery - from google.cloud.exceptions import NotFound - - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to determine existence. - # dataset_id = "your-project.your_dataset" - - try: - client.get_dataset(dataset_id) # Make an API request. 
- print("Dataset {} already exists".format(dataset_id)) - except NotFound: - print("Dataset {} is not found".format(dataset_id)) - # [END bigquery_dataset_exists] diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py deleted file mode 100644 index e25740baaff0..000000000000 --- a/bigquery/samples/delete_dataset.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def delete_dataset(dataset_id): - - # [START bigquery_delete_dataset] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set model_id to the ID of the model to fetch. - # dataset_id = 'your-project.your_dataset' - - # Use the delete_contents parameter to delete a dataset and its contents. - # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. - client.delete_dataset( - dataset_id, delete_contents=True, not_found_ok=True - ) # Make an API request. - - print("Deleted dataset '{}'.".format(dataset_id)) - # [END bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py deleted file mode 100644 index a52de2967e70..000000000000 --- a/bigquery/samples/delete_dataset_labels.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def delete_dataset_labels(dataset_id): - - # [START bigquery_delete_label_dataset] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = "your-project.your_dataset" - - dataset = client.get_dataset(dataset_id) # Make an API request. - - # To delete a label from a dataset, set its value to None. - dataset.labels["color"] = None - - dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. - print("Labels deleted from {}".format(dataset_id)) - # [END bigquery_delete_label_dataset] - return dataset diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py deleted file mode 100644 index 0190315c6bed..000000000000 --- a/bigquery/samples/delete_model.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def delete_model(model_id): - """Sample ID: go/samples-tracker/1534""" - - # [START bigquery_delete_model] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set model_id to the ID of the model to delete. - # model_id = 'your-project.your_dataset.your_model' - - client.delete_model(model_id) # Make an API request. - - print("Deleted model '{}'.".format(model_id)) - # [END bigquery_delete_model] diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py deleted file mode 100644 index 679cbee4bc94..000000000000 --- a/bigquery/samples/delete_routine.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def delete_routine(routine_id): - - # [START bigquery_delete_routine] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set the fully-qualified ID for the routine. - # routine_id = "my-project.my_dataset.my_routine" - - client.delete_routine(routine_id) # Make an API request. - - print("Deleted routine {}.".format(routine_id)) - # [END bigquery_delete_routine] diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py deleted file mode 100644 index 3d0a6f0babc2..000000000000 --- a/bigquery/samples/delete_table.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def delete_table(table_id): - - # [START bigquery_delete_table] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to delete. - # table_id = 'your-project.your_dataset.your_table' - - # If the table does not exist, delete_table raises - # google.api_core.exceptions.NotFound unless not_found_ok is True. - client.delete_table(table_id, not_found_ok=True) # Make an API request.
- print("Deleted table '{}'.".format(table_id)) - # [END bigquery_delete_table] diff --git a/bigquery/samples/download_public_data.py b/bigquery/samples/download_public_data.py deleted file mode 100644 index d10ed161a5da..000000000000 --- a/bigquery/samples/download_public_data.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def download_public_data(): - - # [START bigquery_pandas_public_data] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the fully-qualified table ID in standard - # SQL format, including the project ID and dataset ID. - table_id = "bigquery-public-data.usa_names.usa_1910_current" - - # Use the BigQuery Storage API to speed-up downloads of large tables. - dataframe = client.list_rows(table_id).to_dataframe(create_bqstorage_client=True) - - print(dataframe.info()) - # [END bigquery_pandas_public_data] diff --git a/bigquery/samples/download_public_data_sandbox.py b/bigquery/samples/download_public_data_sandbox.py deleted file mode 100644 index afb50b15c3a9..000000000000 --- a/bigquery/samples/download_public_data_sandbox.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def download_public_data_sandbox(): - - # [START bigquery_pandas_public_data_sandbox] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # `SELECT *` is an anti-pattern in BigQuery because it is cheaper and - # faster to use the BigQuery Storage API directly, but BigQuery Sandbox - # users can only use the BigQuery Storage API to download query results. - query_string = "SELECT * FROM `bigquery-public-data.usa_names.usa_1910_current`" - - # Use the BigQuery Storage API to speed-up downloads of large tables. - dataframe = client.query(query_string).to_dataframe(create_bqstorage_client=True) - - print(dataframe.info()) - # [END bigquery_pandas_public_data_sandbox] diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py deleted file mode 100644 index 54ba05781dd6..000000000000 --- a/bigquery/samples/get_dataset.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def get_dataset(dataset_id): - - # [START bigquery_get_dataset] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = 'your-project.your_dataset' - - dataset = client.get_dataset(dataset_id) # Make an API request. - - full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) - friendly_name = dataset.friendly_name - print( - "Got dataset '{}' with friendly_name '{}'.".format( - full_dataset_id, friendly_name - ) - ) - - # View dataset properties. - print("Description: {}".format(dataset.description)) - print("Labels:") - labels = dataset.labels - if labels: - for label, value in labels.items(): - print("\t{}: {}".format(label, value)) - else: - print("\tDataset has no labels defined.") - - # View tables in dataset. - print("Tables:") - tables = list(client.list_tables(dataset)) # Make an API request(s). - if tables: - for table in tables: - print("\t{}".format(table.table_id)) - else: - print("\tThis dataset does not contain any tables.") - # [END bigquery_get_dataset] diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py deleted file mode 100644 index 18a9ca985f51..000000000000 --- a/bigquery/samples/get_dataset_labels.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def get_dataset_labels(dataset_id): - - # [START bigquery_get_dataset_labels] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = "your-project.your_dataset" - - dataset = client.get_dataset(dataset_id) # Make an API request. - - # View dataset labels. - print("Dataset ID: {}".format(dataset_id)) - print("Labels:") - if dataset.labels: - for label, value in dataset.labels.items(): - print("\t{}: {}".format(label, value)) - else: - print("\tDataset has no labels defined.") - # [END bigquery_get_dataset_labels] diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py deleted file mode 100644 index 1570ef816895..000000000000 --- a/bigquery/samples/get_model.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def get_model(model_id): - """Sample ID: go/samples-tracker/1510""" - - # [START bigquery_get_model] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set model_id to the ID of the model to fetch. - # model_id = 'your-project.your_dataset.your_model' - - model = client.get_model(model_id) # Make an API request. - - full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) - friendly_name = model.friendly_name - print( - "Got model '{}' with friendly_name '{}'.".format(full_model_id, friendly_name) - ) - # [END bigquery_get_model] diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py deleted file mode 100644 index 72715ee1bcd7..000000000000 --- a/bigquery/samples/get_routine.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def get_routine(routine_id): - - # [START bigquery_get_routine] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set the fully-qualified ID for the routine. - # routine_id = "my-project.my_dataset.my_routine" - - routine = client.get_routine(routine_id) # Make an API request. - - print("Routine '{}':".format(routine.reference)) - print("\tType: '{}'".format(routine.type_)) - print("\tLanguage: '{}'".format(routine.language)) - print("\tArguments:") - - for argument in routine.arguments: - print("\t\tName: '{}'".format(argument.name)) - print("\t\tType: '{}'".format(argument.data_type)) - # [END bigquery_get_routine] - return routine diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py deleted file mode 100644 index 0d1d809ba791..000000000000 --- a/bigquery/samples/get_table.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def get_table(table_id): - - # [START bigquery_get_table] - - from google.cloud import bigquery - - # Construct a BigQuery client object. 
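The get_routine sample above prints the routine's type, language, and arguments; the SQL body is available on the same object. A minimal hedged addition, assuming the `routine` returned there:

print("\tBody: '{}'".format(routine.body))  # e.g. 'x * 3' for the sample UDF.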
- client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to fetch. - # table_id = 'your-project.your_dataset.your_table' - - table = client.get_table(table_id) # Make an API request. - - # View table properties - print( - "Got table '{}.{}.{}'.".format(table.project, table.dataset_id, table.table_id) - ) - print("Table schema: {}".format(table.schema)) - print("Table description: {}".format(table.description)) - print("Table has {} rows".format(table.num_rows)) - # [END bigquery_get_table] diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py deleted file mode 100644 index bd4cd6721a57..000000000000 --- a/bigquery/samples/label_dataset.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def label_dataset(dataset_id): - - # [START bigquery_label_dataset] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = "your-project.your_dataset" - - dataset = client.get_dataset(dataset_id) # Make an API request. - dataset.labels = {"color": "green"} - dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. - - print("Labels added to {}".format(dataset_id)) - # [END bigquery_label_dataset] diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py deleted file mode 100644 index 6a1b93d00bb6..000000000000 --- a/bigquery/samples/list_datasets.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def list_datasets(): - - # [START bigquery_list_datasets] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - datasets = list(client.list_datasets()) # Make an API request.
- project = client.project - - if datasets: - print("Datasets in project {}:".format(project)) - for dataset in datasets: - print("\t{}".format(dataset.dataset_id)) - else: - print("{} project does not contain any datasets.".format(project)) - # [END bigquery_list_datasets] diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py deleted file mode 100644 index 1b310049b4b3..000000000000 --- a/bigquery/samples/list_datasets_by_label.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def list_datasets_by_label(): - - # [START bigquery_list_datasets_by_label] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - label_filter = "labels.color:green" - datasets = list(client.list_datasets(filter=label_filter)) # Make an API request. - - if datasets: - print("Datasets filtered by {}:".format(label_filter)) - for dataset in datasets: - print("\t{}.{}".format(dataset.project, dataset.dataset_id)) - else: - print("No datasets found with this filter.") - # [END bigquery_list_datasets_by_label] diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py deleted file mode 100644 index 7251c001a770..000000000000 --- a/bigquery/samples/list_models.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def list_models(dataset_id): - """Sample ID: go/samples-tracker/1512""" - - # [START bigquery_list_models] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset that contains - # the models you are listing. - # dataset_id = 'your-project.your_dataset' - - models = client.list_models(dataset_id) # Make an API request. 
- - print("Models contained in '{}':".format(dataset_id)) - for model in models: - full_model_id = "{}.{}.{}".format( - model.project, model.dataset_id, model.model_id - ) - friendly_name = model.friendly_name - print("{}: friendly_name='{}'".format(full_model_id, friendly_name)) - # [END bigquery_list_models] diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py deleted file mode 100644 index 718d40d680aa..000000000000 --- a/bigquery/samples/list_routines.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def list_routines(dataset_id): - - # [START bigquery_list_routines] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset that contains - # the routines you are listing. - # dataset_id = 'your-project.your_dataset' - - routines = client.list_routines(dataset_id) # Make an API request. - - print("Routines contained in dataset {}:".format(dataset_id)) - for routine in routines: - print(routine.reference) - # [END bigquery_list_routines] diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py deleted file mode 100644 index 9ab527a4915f..000000000000 --- a/bigquery/samples/list_tables.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def list_tables(dataset_id): - - # [START bigquery_list_tables] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset that contains - # the tables you are listing. - # dataset_id = 'your-project.your_dataset' - - tables = client.list_tables(dataset_id) # Make an API request. - - print("Tables contained in '{}':".format(dataset_id)) - for table in tables: - print("{}.{}.{}".format(table.project, table.dataset_id, table.table_id)) - # [END bigquery_list_tables] diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py deleted file mode 100644 index 91dd6e9f09fc..000000000000 --- a/bigquery/samples/load_table_dataframe.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_dataframe(table_id): - - # [START bigquery_load_table_dataframe] - import datetime - - from google.cloud import bigquery - import pandas - import pytz - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - records = [ - { - "title": u"The Meaning of Life", - "release_year": 1983, - "length_minutes": 112.5, - "release_date": pytz.timezone("Europe/Paris") - .localize(datetime.datetime(1983, 5, 9, 13, 0, 0)) - .astimezone(pytz.utc), - # Assume UTC timezone when a datetime object contains no timezone. - "dvd_release": datetime.datetime(2002, 1, 22, 7, 0, 0), - }, - { - "title": u"Monty Python and the Holy Grail", - "release_year": 1975, - "length_minutes": 91.5, - "release_date": pytz.timezone("Europe/London") - .localize(datetime.datetime(1975, 4, 9, 23, 59, 2)) - .astimezone(pytz.utc), - "dvd_release": datetime.datetime(2002, 7, 16, 9, 0, 0), - }, - { - "title": u"Life of Brian", - "release_year": 1979, - "length_minutes": 94.25, - "release_date": pytz.timezone("America/New_York") - .localize(datetime.datetime(1979, 8, 17, 23, 59, 5)) - .astimezone(pytz.utc), - "dvd_release": datetime.datetime(2008, 1, 14, 8, 0, 0), - }, - { - "title": u"And Now for Something Completely Different", - "release_year": 1971, - "length_minutes": 88.0, - "release_date": pytz.timezone("Europe/London") - .localize(datetime.datetime(1971, 9, 28, 23, 59, 7)) - .astimezone(pytz.utc), - "dvd_release": datetime.datetime(2003, 10, 22, 10, 0, 0), - }, - ] - dataframe = pandas.DataFrame( - records, - # In the loaded table, the column order reflects the order of the - # columns in the DataFrame. - columns=[ - "title", - "release_year", - "length_minutes", - "release_date", - "dvd_release", - ], - # Optionally, set a named index, which can also be written to the - # BigQuery table. - index=pandas.Index( - [u"Q24980", u"Q25043", u"Q24953", u"Q16403"], name="wikidata_id" - ), - ) - job_config = bigquery.LoadJobConfig( - # Specify a (partial) schema. All columns are always written to the - # table. The schema is used to assist in data type definitions. - schema=[ - # Specify the type of columns whose type cannot be auto-detected. For - # example the "title" column uses pandas dtype "object", so its - # data type is ambiguous. - bigquery.SchemaField("title", bigquery.enums.SqlTypeNames.STRING), - # Indexes are written if included in the schema by name. - bigquery.SchemaField("wikidata_id", bigquery.enums.SqlTypeNames.STRING), - ], - # Optionally, set the write disposition. BigQuery appends loaded rows - # to an existing table by default, but with WRITE_TRUNCATE write - # disposition it replaces the table with the loaded data. - write_disposition="WRITE_TRUNCATE", - ) - - job = client.load_table_from_dataframe( - dataframe, table_id, job_config=job_config - ) # Make an API request. - job.result() # Wait for the job to complete. - - table = client.get_table(table_id) # Make an API request. 
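At this point the resolved schema can be spot-checked against the DataFrame dtypes. A minimal sketch, reusing the table object fetched above; the expected mappings (int64 to INTEGER, float64 to FLOAT, timezone-aware datetimes to TIMESTAMP) are an assumption based on the default dtype handling described in the comments:

    # "title" and "wikidata_id" come out as STRING via the explicit schema
    # hints; the remaining columns get their types from the DataFrame dtypes.
    for field in table.schema:
        print("{}: {}".format(field.name, field.field_type))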
- print( - "Loaded {} rows and {} columns to {}".format( - table.num_rows, len(table.schema), table_id - ) - ) - # [END bigquery_load_table_dataframe] - return table diff --git a/bigquery/samples/load_table_file.py b/bigquery/samples/load_table_file.py deleted file mode 100644 index b7e45dac3a30..000000000000 --- a/bigquery/samples/load_table_file.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_file(file_path, table_id): - - # [START bigquery_load_from_file] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig( - source_format=bigquery.SourceFormat.CSV, skip_leading_rows=1, autodetect=True, - ) - - with open(file_path, "rb") as source_file: - job = client.load_table_from_file(source_file, table_id, job_config=job_config) - - job.result() # Waits for the job to complete. - - table = client.get_table(table_id) # Make an API request. - print( - "Loaded {} rows and {} columns to {}".format( - table.num_rows, len(table.schema), table_id - ) - ) - # [END bigquery_load_from_file] - return table diff --git a/bigquery/samples/load_table_uri_avro.py b/bigquery/samples/load_table_uri_avro.py deleted file mode 100644 index 5c25eed226b6..000000000000 --- a/bigquery/samples/load_table_uri_avro.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_avro(table_id): - - # [START bigquery_load_table_gcs_avro] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.AVRO) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.avro" - - load_job = client.load_table_from_uri( - uri, table_id, job_config=job_config - ) # Make an API request. - - load_job.result() # Waits for the job to complete.
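As with the DataFrame sample above, load jobs append to an existing table by default; a write disposition on the job config changes that for URI-based loads too. A minimal sketch (the table ID is hypothetical):

    from google.cloud import bigquery

    client = bigquery.Client()

    job_config = bigquery.LoadJobConfig(
        source_format=bigquery.SourceFormat.AVRO,
        # Replace any existing rows instead of appending to them.
        write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE,
    )

    load_job = client.load_table_from_uri(
        "gs://cloud-samples-data/bigquery/us-states/us-states.avro",
        "your-project.your_dataset.your_table_name",  # Hypothetical table ID.
        job_config=job_config,
    )
    load_job.result()  # Wait for the job to complete.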
- - destination_table = client.get_table(table_id) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_avro] diff --git a/bigquery/samples/load_table_uri_cmek.py b/bigquery/samples/load_table_uri_cmek.py deleted file mode 100644 index 8bd84993c293..000000000000 --- a/bigquery/samples/load_table_uri_cmek.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_cmek(table_id, kms_key_name): - - # [START bigquery_load_table_gcs_json_cmek] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - # Set the encryption key to use for the destination. - # TODO: Replace this key with a key you have created in KMS. - # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - # "cloud-samples-tests", "us", "test", "test" - # ) - - job_config = bigquery.LoadJobConfig( - autodetect=True, - source_format=bigquery.SourceFormat.NEWLINE_DELIMITED_JSON, - destination_encryption_configuration=bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name - ), - ) - - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" - - load_job = client.load_table_from_uri( - uri, - table_id, - location="US", # Must match the destination dataset location. - job_config=job_config, - ) # Make an API request. - - assert load_job.job_type == "load" - - load_job.result() # Waits for the job to complete. - - assert load_job.state == "DONE" - table = client.get_table(table_id) - - if table.encryption_configuration.kms_key_name == kms_key_name: - print("The table was loaded with the encryption configuration key.") - - # [END bigquery_load_table_gcs_json_cmek] diff --git a/bigquery/samples/load_table_uri_csv.py b/bigquery/samples/load_table_uri_csv.py deleted file mode 100644 index 0736a560cc75..000000000000 --- a/bigquery/samples/load_table_uri_csv.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_csv(table_id): - - # [START bigquery_load_table_gcs_csv] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create.
- # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig( - schema=[ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ], - skip_leading_rows=1, - # The source format defaults to CSV, so the line below is optional. - source_format=bigquery.SourceFormat.CSV, - ) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - - load_job = client.load_table_from_uri( - uri, table_id, job_config=job_config - ) # Make an API request. - - load_job.result() # Waits for the job to complete. - - destination_table = client.get_table(table_id) # Make an API request. - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_csv] diff --git a/bigquery/samples/load_table_uri_json.py b/bigquery/samples/load_table_uri_json.py deleted file mode 100644 index 3c21972c80a0..000000000000 --- a/bigquery/samples/load_table_uri_json.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_json(table_id): - # [START bigquery_load_table_gcs_json] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig( - schema=[ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ], - source_format=bigquery.SourceFormat.NEWLINE_DELIMITED_JSON, - ) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" - - load_job = client.load_table_from_uri( - uri, - table_id, - location="US", # Must match the destination dataset location. - job_config=job_config, - ) # Make an API request. - - load_job.result() # Waits for the job to complete. - - destination_table = client.get_table(table_id) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_json] diff --git a/bigquery/samples/load_table_uri_orc.py b/bigquery/samples/load_table_uri_orc.py deleted file mode 100644 index 3ab6ff45aa0a..000000000000 --- a/bigquery/samples/load_table_uri_orc.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_orc(table_id): - - # [START bigquery_load_table_gcs_orc] - from google.cloud import bigquery - - # Construct a BigQuery client object. 
- client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.ORC) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.orc" - - load_job = client.load_table_from_uri( - uri, table_id, job_config=job_config - ) # Make an API request. - - load_job.result() # Waits for the job to complete. - - destination_table = client.get_table(table_id) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_orc] diff --git a/bigquery/samples/load_table_uri_parquet.py b/bigquery/samples/load_table_uri_parquet.py deleted file mode 100644 index 3dce5e8efda8..000000000000 --- a/bigquery/samples/load_table_uri_parquet.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def load_table_uri_parquet(table_id): - # [START bigquery_load_table_gcs_parquet] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to create. - # table_id = "your-project.your_dataset.your_table_name" - - job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.PARQUET,) - uri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet" - - load_job = client.load_table_from_uri( - uri, table_id, job_config=job_config - ) # Make an API request. - - load_job.result() # Waits for the job to complete. - - destination_table = client.get_table(table_id) - print("Loaded {} rows.".format(destination_table.num_rows)) - # [END bigquery_load_table_gcs_parquet] diff --git a/bigquery/samples/query_external_gcs_temporary_table.py b/bigquery/samples/query_external_gcs_temporary_table.py deleted file mode 100644 index 3c3caf695870..000000000000 --- a/bigquery/samples/query_external_gcs_temporary_table.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_external_gcs_temporary_table(): - - # [START bigquery_query_external_gcs_temp] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # Configure the external data source and query job.
- external_config = bigquery.ExternalConfig("CSV") - external_config.source_uris = [ - "gs://cloud-samples-data/bigquery/us-states/us-states.csv" - ] - external_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - external_config.options.skip_leading_rows = 1 - table_id = "us_states" - job_config = bigquery.QueryJobConfig(table_definitions={table_id: external_config}) - - # Example query to find states starting with 'W'. - sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) - - query_job = client.query(sql, job_config=job_config) # Make an API request. - - w_states = list(query_job) # Wait for the job to complete. - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_gcs_temp] diff --git a/bigquery/samples/query_external_sheets_permanent_table.py b/bigquery/samples/query_external_sheets_permanent_table.py deleted file mode 100644 index 915e9acc303a..000000000000 --- a/bigquery/samples/query_external_sheets_permanent_table.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_external_sheets_permanent_table(dataset_id): - - # [START bigquery_query_external_sheets_perm] - from google.cloud import bigquery - import google.auth - - # Create credentials with Drive & BigQuery API scopes. - # Both APIs must be enabled for your project before running this code. - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - - # Construct a BigQuery client object. - client = bigquery.Client(credentials=credentials, project=project) - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = "your-project.your_dataset" - - # Configure the external data source. - dataset = client.get_dataset(dataset_id) - table_id = "us_states" - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - table = bigquery.Table(dataset.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public). - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.options.skip_leading_rows = 1 # Optionally skip header row. - external_config.options.range = ( - "us-states!A20:B49" # Optionally set range of the sheet to query from. - ) - table.external_data_configuration = external_config - - # Create a permanent table linked to the Sheets file. - table = client.create_table(table) # Make an API request. - - # Example query to find states starting with "W". 
- sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) - - query_job = client.query(sql) # Make an API request. - - # Wait for the query to complete. - w_states = list(query_job) - print( - "There are {} states with names starting with W in the selected range.".format( - len(w_states) - ) - ) - # [END bigquery_query_external_sheets_perm] diff --git a/bigquery/samples/query_external_sheets_temporary_table.py b/bigquery/samples/query_external_sheets_temporary_table.py deleted file mode 100644 index 1b70e9531f96..000000000000 --- a/bigquery/samples/query_external_sheets_temporary_table.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_external_sheets_temporary_table(): - - # [START bigquery_query_external_sheets_temp] - # [START bigquery_auth_drive_scope] - from google.cloud import bigquery - import google.auth - - # Create credentials with Drive & BigQuery API scopes. - # Both APIs must be enabled for your project before running this code. - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - - # Construct a BigQuery client object. - client = bigquery.Client(credentials=credentials, project=project) - # [END bigquery_auth_drive_scope] - - # Configure the external data source and query job. - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public). - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - external_config.options.skip_leading_rows = 1 # Optionally skip header row. - external_config.options.range = ( - "us-states!A20:B49" # Optionally set range of the sheet to query from. - ) - table_id = "us_states" - job_config = bigquery.QueryJobConfig(table_definitions={table_id: external_config}) - - # Example query to find states starting with "W". - sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) - - query_job = client.query(sql, job_config=job_config) # Make an API request. - - # Wait for the query to complete. - w_states = list(query_job) - print( - "There are {} states with names starting with W in the selected range.".format( - len(w_states) - ) - ) - # [END bigquery_query_external_sheets_temp] diff --git a/bigquery/samples/query_no_cache.py b/bigquery/samples/query_no_cache.py deleted file mode 100644 index e380f0b15de0..000000000000 --- a/bigquery/samples/query_no_cache.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_no_cache(): - - # [START bigquery_query_no_cache] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - job_config = bigquery.QueryJobConfig(use_query_cache=False) - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - query_job = client.query(sql, job_config=job_config) # Make an API request. - - for row in query_job: - print(row) - # [END bigquery_query_no_cache] diff --git a/bigquery/samples/query_pagination.py b/bigquery/samples/query_pagination.py deleted file mode 100644 index 57a4212cf664..000000000000 --- a/bigquery/samples/query_pagination.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_pagination(): - - # [START bigquery_query_pagination] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - query = """ - SELECT name, SUM(number) as total_people - FROM `bigquery-public-data.usa_names.usa_1910_2013` - GROUP BY name - ORDER BY total_people DESC - """ - query_job = client.query(query) # Make an API request. - query_job.result() # Wait for the query to complete. - - # Get the destination table for the query results. - # - # All queries write to a destination table. If a destination table is not - # specified, BigQuery populates it with a reference to a temporary - # anonymous table after the query completes. - destination = query_job.destination - - # Get the schema (and other properties) for the destination table. - # - # A schema is useful for converting from BigQuery types to Python types. - destination = client.get_table(destination) - - # Download rows. - # - # The client library automatically handles pagination. - print("The query data:") - rows = client.list_rows(destination, max_results=20) - for row in rows: - print("name={}, count={}".format(row["name"], row["total_people"])) - # [END bigquery_query_pagination] diff --git a/bigquery/samples/query_script.py b/bigquery/samples/query_script.py deleted file mode 100644 index 9390d352dd40..000000000000 --- a/bigquery/samples/query_script.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_script(): - # [START bigquery_query_script] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # Run a SQL script. - sql_script = """ - -- Declare a variable to hold names as an array. - DECLARE top_names ARRAY<STRING>; - - -- Build an array of the top 100 names from the year 2000. - SET top_names = ( - SELECT ARRAY_AGG(name ORDER BY number DESC LIMIT 100) - FROM `bigquery-public-data.usa_names.usa_1910_2013` - WHERE year = 2000 - ); - - -- Which names appear as words in Shakespeare's plays? - SELECT - name AS shakespeare_name - FROM UNNEST(top_names) AS name - WHERE name IN ( - SELECT word - FROM `bigquery-public-data.samples.shakespeare` - ); - """ - parent_job = client.query(sql_script) - - # Wait for the whole script to finish. - rows_iterable = parent_job.result() - print("Script created {} child jobs.".format(parent_job.num_child_jobs)) - - # Fetch result rows for the final sub-job in the script. - rows = list(rows_iterable) - print( - "{} of the top 100 names from year 2000 also appear in Shakespeare's works.".format( - len(rows) - ) - ) - - # Fetch jobs created by the SQL script. - child_jobs_iterable = client.list_jobs(parent_job=parent_job) - for child_job in child_jobs_iterable: - child_rows = list(child_job.result()) - print( - "Child job with ID {} produced {} row(s).".format( - child_job.job_id, len(child_rows) - ) - ) - - # [END bigquery_query_script] diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py deleted file mode 100644 index 4a57992d13c2..000000000000 --- a/bigquery/samples/query_to_arrow.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def query_to_arrow(): - - # [START bigquery_query_to_arrow] - - from google.cloud import bigquery - - # Construct a BigQuery client object.
- client = bigquery.Client() - - sql = """ - WITH races AS ( - SELECT "800M" AS race, - [STRUCT("Rudisha" as name, [23.4, 26.3, 26.4, 26.1] as splits), - STRUCT("Makhloufi" as name, [24.5, 25.4, 26.6, 26.1] as splits), - STRUCT("Murphy" as name, [23.9, 26.0, 27.0, 26.0] as splits), - STRUCT("Bosse" as name, [23.6, 26.2, 26.5, 27.1] as splits), - STRUCT("Rotich" as name, [24.7, 25.6, 26.9, 26.4] as splits), - STRUCT("Lewandowski" as name, [25.0, 25.7, 26.3, 27.2] as splits), - STRUCT("Kipketer" as name, [23.2, 26.1, 27.3, 29.4] as splits), - STRUCT("Berian" as name, [23.7, 26.1, 27.0, 29.3] as splits)] - AS participants) - SELECT - race, - participant - FROM races r - CROSS JOIN UNNEST(r.participants) as participant; - """ - query_job = client.query(sql) - arrow_table = query_job.to_arrow() # Make an API request. - - print( - "Downloaded {} rows, {} columns.".format( - arrow_table.num_rows, arrow_table.num_columns - ) - ) - print("\nSchema:\n{}".format(repr(arrow_table.schema))) - # [END bigquery_query_to_arrow] - return arrow_table diff --git a/bigquery/samples/table_exists.py b/bigquery/samples/table_exists.py deleted file mode 100644 index 152d95534add..000000000000 --- a/bigquery/samples/table_exists.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def table_exists(table_id): - - # [START bigquery_table_exists] - from google.cloud import bigquery - from google.cloud.exceptions import NotFound - - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to determine existence. - # table_id = "your-project.your_dataset.your_table" - - try: - client.get_table(table_id) # Make an API request. - print("Table {} already exists.".format(table_id)) - except NotFound: - print("Table {} was not found.".format(table_id)) - # [END bigquery_table_exists] diff --git a/bigquery/samples/table_insert_rows.py b/bigquery/samples/table_insert_rows.py deleted file mode 100644 index 130f9dbbddf2..000000000000 --- a/bigquery/samples/table_insert_rows.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def table_insert_rows(table_id): - - # [START bigquery_table_insert_rows] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to append rows to.
- # table_id = "your-project.your_dataset.your_table" - - table = client.get_table(table_id) # Make an API request. - rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] - - errors = client.insert_rows(table, rows_to_insert) # Make an API request. - if errors == []: - print("New rows have been added.") - # [END bigquery_table_insert_rows] diff --git a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py deleted file mode 100644 index 2410ba1765fc..000000000000 --- a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def table_insert_rows_explicit_none_insert_ids(table_id): - - # [START bigquery_table_insert_rows_explicit_none_insert_ids] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the table to append rows to. - # table_id = "your-project.your_dataset.your_table" - - table = client.get_table(table_id) # Make an API request. - rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] - - errors = client.insert_rows( - table, rows_to_insert, row_ids=[None] * len(rows_to_insert) - ) # Make an API request. - if errors == []: - print("New rows have been added.") - # [END bigquery_table_insert_rows_explicit_none_insert_ids] diff --git a/bigquery/samples/tests/__init__.py b/bigquery/samples/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery/samples/tests/conftest.py b/bigquery/samples/tests/conftest.py deleted file mode 100644 index d80085dd3425..000000000000 --- a/bigquery/samples/tests/conftest.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
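The conftest below shares a single real client across all of the samples by replacing the Client class with an autospec mock. In isolation, the pattern looks like this minimal sketch (assuming mock and google-cloud-bigquery are installed):

    import mock
    from google.cloud import bigquery

    real_client = bigquery.Client()

    # Autospec the class, then make every bigquery.Client(...) call return
    # the one shared instance, so sample code under test reuses it.
    mock_client = mock.create_autospec(bigquery.Client)
    mock_client.return_value = real_client
    bigquery.Client = mock_client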
- -import datetime -import uuid - -import google.auth -import mock -import pytest - -from google.cloud import bigquery -from google.cloud import bigquery_v2 - - -@pytest.fixture(scope="session", autouse=True) -def client(): - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - real_client = bigquery.Client(credentials=credentials, project=project) - mock_client = mock.create_autospec(bigquery.Client) - mock_client.return_value = real_client - bigquery.Client = mock_client - return real_client - - -@pytest.fixture -def random_table_id(dataset_id): - now = datetime.datetime.now() - random_table_id = "example_table_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - return "{}.{}".format(dataset_id, random_table_id) - - -@pytest.fixture -def random_dataset_id(client): - now = datetime.datetime.now() - random_dataset_id = "example_dataset_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - yield "{}.{}".format(client.project, random_dataset_id) - client.delete_dataset(random_dataset_id, delete_contents=True, not_found_ok=True) - - -@pytest.fixture -def random_routine_id(dataset_id): - now = datetime.datetime.now() - random_routine_id = "example_routine_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - return "{}.{}".format(dataset_id, random_routine_id) - - -@pytest.fixture -def dataset_id(client): - now = datetime.datetime.now() - dataset_id = "python_dataset_sample_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - dataset = client.create_dataset(dataset_id) - yield "{}.{}".format(dataset.project, dataset.dataset_id) - client.delete_dataset(dataset, delete_contents=True, not_found_ok=True) - - -@pytest.fixture -def table_id(client, dataset_id): - now = datetime.datetime.now() - table_id = "python_table_sample_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - - table = bigquery.Table("{}.{}".format(dataset_id, table_id)) - table = client.create_table(table) - yield "{}.{}.{}".format(table.project, table.dataset_id, table.table_id) - client.delete_table(table, not_found_ok=True) - - -@pytest.fixture -def table_with_schema_id(client, dataset_id): - now = datetime.datetime.now() - table_id = "python_table_with_schema_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - schema = [ - bigquery.SchemaField("full_name", "STRING"), - bigquery.SchemaField("age", "INTEGER"), - ] - table = bigquery.Table("{}.{}".format(dataset_id, table_id), schema=schema) - table = client.create_table(table) - yield "{}.{}.{}".format(table.project, table.dataset_id, table.table_id) - client.delete_table(table, not_found_ok=True) - - -@pytest.fixture -def table_with_data_id(): - return "bigquery-public-data.samples.shakespeare" - - -@pytest.fixture -def routine_id(client, dataset_id): - now = datetime.datetime.now() - routine_id = "python_routine_sample_{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) - - routine = bigquery.Routine("{}.{}".format(dataset_id, routine_id)) - routine.type_ = "SCALAR_FUNCTION" - routine.language = "SQL" - routine.body = "x * 3" - routine.arguments = [ - bigquery.RoutineArgument( - name="x", - data_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ), - ) - ] - - routine = client.create_routine(routine) - yield "{}.{}.{}".format(routine.project, routine.dataset_id, 
routine.routine_id) - client.delete_routine(routine, not_found_ok=True) - - -@pytest.fixture -def model_id(client, dataset_id): - model_id = "{}.{}".format(dataset_id, uuid.uuid4().hex) - - # The only way to create a model resource is via SQL. - # Use a very small dataset (2 points), to train a model quickly. - sql = """ - CREATE MODEL `{}` - OPTIONS ( - model_type='linear_reg', - max_iteration=1, - learn_rate=0.4, - learn_rate_strategy='constant' - ) AS ( - SELECT 'a' AS f1, 2.0 AS label - UNION ALL - SELECT 'b' AS f1, 3.8 AS label - ) - """.format( - model_id - ) - - client.query(sql).result() - return model_id - - -@pytest.fixture -def kms_key_name(): - return "projects/cloud-samples-tests/locations/us/keyRings/test/cryptoKeys/test" diff --git a/bigquery/samples/tests/test_add_empty_column.py b/bigquery/samples/tests/test_add_empty_column.py deleted file mode 100644 index d89fcb6b7022..000000000000 --- a/bigquery/samples/tests/test_add_empty_column.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import add_empty_column - - -def test_add_empty_column(capsys, table_id): - - add_empty_column.add_empty_column(table_id) - out, err = capsys.readouterr() - assert "A new column has been added." in out diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py deleted file mode 100644 index a5f647bdbda2..000000000000 --- a/bigquery/samples/tests/test_browse_table_data.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. 
import browse_table_data - - -def test_browse_table_data(capsys, table_with_data_id): - - browse_table_data.browse_table_data(table_with_data_id) - out, err = capsys.readouterr() - assert "Downloaded 164656 rows from table {}".format(table_with_data_id) in out - assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out - assert "Selected 2 columns from table {}".format(table_with_data_id) in out - assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out - assert "word" in out - assert "LVII" in out diff --git a/bigquery/samples/tests/test_client_list_jobs.py b/bigquery/samples/tests/test_client_list_jobs.py deleted file mode 100644 index 896950a8253f..000000000000 --- a/bigquery/samples/tests/test_client_list_jobs.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_list_jobs -from .. import create_job - - -def test_client_list_jobs(capsys, client): - - job = create_job.create_job() - client.cancel_job(job.job_id) - job.cancel() - client_list_jobs.client_list_jobs() - out, err = capsys.readouterr() - assert "Started job: {}".format(job.job_id) in out - assert "Last 10 jobs:" in out - assert "Jobs from the last ten minutes:" in out - assert "Last 10 jobs run by all users:" in out - assert "Last 10 jobs done:" in out diff --git a/bigquery/samples/tests/test_client_load_partitioned_table.py b/bigquery/samples/tests/test_client_load_partitioned_table.py deleted file mode 100644 index f1d72a8587c6..000000000000 --- a/bigquery/samples/tests/test_client_load_partitioned_table.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_load_partitioned_table - - -def test_client_load_partitioned_table(capsys, random_table_id): - - client_load_partitioned_table.client_load_partitioned_table(random_table_id) - out, err = capsys.readouterr() - assert "Loaded 50 rows to table {}".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_client_query.py b/bigquery/samples/tests/test_client_query.py deleted file mode 100644 index 810c46a17943..000000000000 --- a/bigquery/samples/tests/test_client_query.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query - - -def test_client_query(capsys,): - - client_query.client_query() - out, err = capsys.readouterr() - assert "The query data:" in out - assert "name=James, count=272793" in out diff --git a/bigquery/samples/tests/test_client_query_add_column.py b/bigquery/samples/tests/test_client_query_add_column.py deleted file mode 100644 index 254533f78778..000000000000 --- a/bigquery/samples/tests/test_client_query_add_column.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import client_query_add_column - - -def test_client_query_add_column(capsys, random_table_id, client): - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - client.create_table(bigquery.Table(random_table_id, schema=schema)) - - client_query_add_column.client_query_add_column(random_table_id) - out, err = capsys.readouterr() - assert "Table {} contains 2 columns".format(random_table_id) in out - assert "Table {} now contains 3 columns".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_client_query_batch.py b/bigquery/samples/tests/test_client_query_batch.py deleted file mode 100644 index c5e19985dda9..000000000000 --- a/bigquery/samples/tests/test_client_query_batch.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. 
import client_query_batch - - -def test_client_query_batch(capsys,): - - job = client_query_batch.client_query_batch() - out, err = capsys.readouterr() - assert "Job {} is currently in state {}".format(job.job_id, job.state) in out diff --git a/bigquery/samples/tests/test_client_query_destination_table.py b/bigquery/samples/tests/test_client_query_destination_table.py deleted file mode 100644 index 6bcdd498a215..000000000000 --- a/bigquery/samples/tests/test_client_query_destination_table.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_destination_table - - -def test_client_query_destination_table(capsys, table_id): - - client_query_destination_table.client_query_destination_table(table_id) - out, err = capsys.readouterr() - assert "Query results loaded to the table {}".format(table_id) in out diff --git a/bigquery/samples/tests/test_client_query_destination_table_cmek.py b/bigquery/samples/tests/test_client_query_destination_table_cmek.py deleted file mode 100644 index 4f9e3bc9a944..000000000000 --- a/bigquery/samples/tests/test_client_query_destination_table_cmek.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_destination_table_cmek - - -def test_client_query_destination_table_cmek(capsys, random_table_id, kms_key_name): - - client_query_destination_table_cmek.client_query_destination_table_cmek( - random_table_id, kms_key_name - ) - out, err = capsys.readouterr() - assert "The destination table is written using the encryption configuration" in out diff --git a/bigquery/samples/tests/test_client_query_destination_table_legacy.py b/bigquery/samples/tests/test_client_query_destination_table_legacy.py deleted file mode 100644 index 46077497b1c7..000000000000 --- a/bigquery/samples/tests/test_client_query_destination_table_legacy.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_destination_table_legacy - - -def test_client_query_destination_table_legacy(capsys, random_table_id): - - client_query_destination_table_legacy.client_query_destination_table_legacy( - random_table_id - ) - out, err = capsys.readouterr() - assert "Query results loaded to the table {}".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py deleted file mode 100644 index 5cbf2e3fae6e..000000000000 --- a/bigquery/samples/tests/test_client_query_dry_run.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_dry_run - - -def test_client_query_dry_run(capsys,): - - query_job = client_query_dry_run.client_query_dry_run() - out, err = capsys.readouterr() - assert "This query will process" in out - assert query_job.state == "DONE" - assert query_job.dry_run - assert query_job.total_bytes_processed > 0 diff --git a/bigquery/samples/tests/test_client_query_legacy_sql.py b/bigquery/samples/tests/test_client_query_legacy_sql.py deleted file mode 100644 index ab240fad1a1e..000000000000 --- a/bigquery/samples/tests/test_client_query_legacy_sql.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re - -from .. import client_query_legacy_sql - - -def test_client_query_legacy_sql(capsys,): - - client_query_legacy_sql.client_query_legacy_sql() - out, err = capsys.readouterr() - assert re.search(r"(Row[\w(){}:', ]+)$", out) diff --git a/bigquery/samples/tests/test_client_query_relax_column.py b/bigquery/samples/tests/test_client_query_relax_column.py deleted file mode 100644 index 0c5b7aa6f982..000000000000 --- a/bigquery/samples/tests/test_client_query_relax_column.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import client_query_relax_column - - -def test_client_query_relax_column(capsys, random_table_id, client): - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - client.create_table(bigquery.Table(random_table_id, schema=schema)) - - client_query_relax_column.client_query_relax_column(random_table_id) - out, err = capsys.readouterr() - assert "2 fields in the schema are required." in out - assert "0 fields in the schema are now required." in out diff --git a/bigquery/samples/tests/test_client_query_w_array_params.py b/bigquery/samples/tests/test_client_query_w_array_params.py deleted file mode 100644 index 07e0294e93d1..000000000000 --- a/bigquery/samples/tests/test_client_query_w_array_params.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_w_array_params - - -def test_client_query_w_array_params(capsys,): - - client_query_w_array_params.client_query_w_array_params() - out, err = capsys.readouterr() - assert "James" in out diff --git a/bigquery/samples/tests/test_client_query_w_named_params.py b/bigquery/samples/tests/test_client_query_w_named_params.py deleted file mode 100644 index 2970dfdc47bd..000000000000 --- a/bigquery/samples/tests/test_client_query_w_named_params.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_w_named_params - - -def test_client_query_w_named_params(capsys,): - - client_query_w_named_params.client_query_w_named_params() - out, err = capsys.readouterr() - assert "the" in out diff --git a/bigquery/samples/tests/test_client_query_w_positional_params.py b/bigquery/samples/tests/test_client_query_w_positional_params.py deleted file mode 100644 index e41ffa825584..000000000000 --- a/bigquery/samples/tests/test_client_query_w_positional_params.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_w_positional_params - - -def test_client_query_w_positional_params(capsys,): - - client_query_w_positional_params.client_query_w_positional_params() - out, err = capsys.readouterr() - assert "the" in out diff --git a/bigquery/samples/tests/test_client_query_w_struct_params.py b/bigquery/samples/tests/test_client_query_w_struct_params.py deleted file mode 100644 index 03083a3a72c7..000000000000 --- a/bigquery/samples/tests/test_client_query_w_struct_params.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_w_struct_params - - -def test_client_query_w_struct_params(capsys,): - - client_query_w_struct_params.client_query_w_struct_params() - out, err = capsys.readouterr() - assert "1" in out - assert "foo" in out diff --git a/bigquery/samples/tests/test_client_query_w_timestamp_params.py b/bigquery/samples/tests/test_client_query_w_timestamp_params.py deleted file mode 100644 index 9dddcb9a0e5d..000000000000 --- a/bigquery/samples/tests/test_client_query_w_timestamp_params.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import client_query_w_timestamp_params - - -def test_client_query_w_timestamp_params(capsys,): - - client_query_w_timestamp_params.client_query_w_timestamp_params() - out, err = capsys.readouterr() - assert "2016, 12, 7, 9, 0" in out diff --git a/bigquery/samples/tests/test_copy_table.py b/bigquery/samples/tests/test_copy_table.py deleted file mode 100644 index 0b95c5443777..000000000000 --- a/bigquery/samples/tests/test_copy_table.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import copy_table - - -def test_copy_table(capsys, table_with_data_id, random_table_id, client): - - copy_table.copy_table(table_with_data_id, random_table_id) - out, err = capsys.readouterr() - assert "A copy of the table created." in out - assert ( - client.get_table(random_table_id).num_rows - == client.get_table(table_with_data_id).num_rows - ) diff --git a/bigquery/samples/tests/test_copy_table_cmek.py b/bigquery/samples/tests/test_copy_table_cmek.py deleted file mode 100644 index ac04675c989d..000000000000 --- a/bigquery/samples/tests/test_copy_table_cmek.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import copy_table_cmek - - -def test_copy_table_cmek(capsys, random_table_id, table_with_data_id, kms_key_name): - - copy_table_cmek.copy_table_cmek(random_table_id, table_with_data_id, kms_key_name) - out, err = capsys.readouterr() - assert "A copy of the table created" in out diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py deleted file mode 100644 index 45c6d34f5a41..000000000000 --- a/bigquery/samples/tests/test_copy_table_multiple_source.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import six -from google.cloud import bigquery - -from .. 
import copy_table_multiple_source - - -def test_copy_table_multiple_source(capsys, random_table_id, random_dataset_id, client): - - dataset = bigquery.Dataset(random_dataset_id) - dataset.location = "US" - dataset = client.create_dataset(dataset) - table_data = {"table1": b"Washington,WA", "table2": b"California,CA"} - for table_id, data in table_data.items(): - table_ref = dataset.table(table_id) - job_config = bigquery.LoadJobConfig( - schema=[ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - ) - body = six.BytesIO(data) - client.load_table_from_file( - body, table_ref, location="US", job_config=job_config - ).result() - - table_ids = [ - "{}.table1".format(random_dataset_id), - "{}.table2".format(random_dataset_id), - ] - - copy_table_multiple_source.copy_table_multiple_source(random_table_id, table_ids) - dest_table = client.get_table(random_table_id) - out, err = capsys.readouterr() - assert ( - "The tables {} have been appended to {}".format(table_ids, random_table_id) - in out - ) - assert dest_table.num_rows > 0 diff --git a/bigquery/samples/tests/test_create_dataset.py b/bigquery/samples/tests/test_create_dataset.py deleted file mode 100644 index a000038030e1..000000000000 --- a/bigquery/samples/tests/test_create_dataset.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import create_dataset - - -def test_create_dataset(capsys, random_dataset_id): - - create_dataset.create_dataset(random_dataset_id) - out, err = capsys.readouterr() - assert "Created dataset {}".format(random_dataset_id) in out diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py deleted file mode 100644 index eab4b3e485f9..000000000000 --- a/bigquery/samples/tests/test_create_job.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. 
import create_job - - -def test_create_job(capsys, client): - query_job = create_job.create_job() - client.cancel_job(query_job.job_id, location=query_job.location) - out, err = capsys.readouterr() - assert "Started job: {}".format(query_job.job_id) in out diff --git a/bigquery/samples/tests/test_create_table.py b/bigquery/samples/tests/test_create_table.py deleted file mode 100644 index 48e52889acce..000000000000 --- a/bigquery/samples/tests/test_create_table.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import create_table - - -def test_create_table(capsys, random_table_id): - create_table.create_table(random_table_id) - out, err = capsys.readouterr() - assert "Created table {}".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_create_table_range_partitioned.py b/bigquery/samples/tests/test_create_table_range_partitioned.py deleted file mode 100644 index 9745966bf02b..000000000000 --- a/bigquery/samples/tests/test_create_table_range_partitioned.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import create_table_range_partitioned - - -def test_create_table_range_partitioned(capsys, random_table_id): - table = create_table_range_partitioned.create_table_range_partitioned( - random_table_id - ) - out, _ = capsys.readouterr() - assert "Created table {}".format(random_table_id) in out - assert table.range_partitioning.field == "zipcode" - assert table.range_partitioning.range_.start == 0 - assert table.range_partitioning.range_.end == 100000 - assert table.range_partitioning.range_.interval == 10 diff --git a/bigquery/samples/tests/test_dataset_exists.py b/bigquery/samples/tests/test_dataset_exists.py deleted file mode 100644 index 6bc38b4d27ce..000000000000 --- a/bigquery/samples/tests/test_dataset_exists.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from google.cloud import bigquery - -from .. import dataset_exists - - -def test_dataset_exists(capsys, random_dataset_id, client): - - dataset_exists.dataset_exists(random_dataset_id) - out, err = capsys.readouterr() - assert "Dataset {} is not found".format(random_dataset_id) in out - dataset = bigquery.Dataset(random_dataset_id) - dataset = client.create_dataset(dataset) - dataset_exists.dataset_exists(random_dataset_id) - out, err = capsys.readouterr() - assert "Dataset {} already exists".format(random_dataset_id) in out diff --git a/bigquery/samples/tests/test_dataset_label_samples.py b/bigquery/samples/tests/test_dataset_label_samples.py deleted file mode 100644 index 0dbb2a76bdd9..000000000000 --- a/bigquery/samples/tests/test_dataset_label_samples.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import delete_dataset_labels -from .. import get_dataset_labels -from .. import label_dataset - - -def test_dataset_label_samples(capsys, dataset_id): - - label_dataset.label_dataset(dataset_id) - out, err = capsys.readouterr() - assert "Labels added to {}".format(dataset_id) in out - - get_dataset_labels.get_dataset_labels(dataset_id) - out, err = capsys.readouterr() - assert "color: green" in out - - dataset = delete_dataset_labels.delete_dataset_labels(dataset_id) - out, err = capsys.readouterr() - assert "Labels deleted from {}".format(dataset_id) in out - assert dataset.labels.get("color") is None diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py deleted file mode 100644 index 1f9b3c823fb9..000000000000 --- a/bigquery/samples/tests/test_delete_dataset.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import delete_dataset - - -def test_delete_dataset(capsys, dataset_id): - - delete_dataset.delete_dataset(dataset_id) - out, err = capsys.readouterr() - assert "Deleted dataset '{}'.".format(dataset_id) in out diff --git a/bigquery/samples/tests/test_delete_table.py b/bigquery/samples/tests/test_delete_table.py deleted file mode 100644 index 7065743b0485..000000000000 --- a/bigquery/samples/tests/test_delete_table.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import delete_table - - -def test_delete_table(capsys, table_id): - - delete_table.delete_table(table_id) - out, err = capsys.readouterr() - assert "Deleted table '{}'.".format(table_id) in out diff --git a/bigquery/samples/tests/test_download_public_data.py b/bigquery/samples/tests/test_download_public_data.py deleted file mode 100644 index 82297b2032f6..000000000000 --- a/bigquery/samples/tests/test_download_public_data.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -from .. import download_public_data - - -def test_download_public_data(caplog, capsys): - # Enable debug-level logging to verify the BigQuery Storage API is used. - caplog.set_level(logging.DEBUG) - - download_public_data.download_public_data() - out, _ = capsys.readouterr() - assert "year" in out - assert "gender" in out - assert "name" in out - - assert any( - "Started reading table 'bigquery-public-data.usa_names.usa_1910_current' with BQ Storage API session" - in message - for message in caplog.messages - ) diff --git a/bigquery/samples/tests/test_download_public_data_sandbox.py b/bigquery/samples/tests/test_download_public_data_sandbox.py deleted file mode 100644 index e322cb2e54c9..000000000000 --- a/bigquery/samples/tests/test_download_public_data_sandbox.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -from .. import download_public_data_sandbox - - -def test_download_public_data_sandbox(caplog, capsys): - # Enable debug-level logging to verify the BigQuery Storage API is used. - caplog.set_level(logging.DEBUG) - - download_public_data_sandbox.download_public_data_sandbox() - out, err = capsys.readouterr() - assert "year" in out - assert "gender" in out - assert "name" in out - - assert any( - # An anonymous table is used because this sample reads from query results. 
- ("Started reading table" in message and "BQ Storage API session" in message) - for message in caplog.messages - ) diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py deleted file mode 100644 index 3afdb00d39bd..000000000000 --- a/bigquery/samples/tests/test_get_dataset.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import get_dataset - - -def test_get_dataset(capsys, dataset_id): - - get_dataset.get_dataset(dataset_id) - out, err = capsys.readouterr() - assert dataset_id in out diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py deleted file mode 100644 index 8bbd0681b584..000000000000 --- a/bigquery/samples/tests/test_get_table.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import get_table - - -def test_get_table(capsys, random_table_id, client): - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - table = bigquery.Table(random_table_id, schema) - table.description = "Sample Table" - table = client.create_table(table) - - get_table.get_table(random_table_id) - out, err = capsys.readouterr() - assert "Got table '{}'.".format(random_table_id) in out - assert "full_name" in out - assert "Table description: Sample Table" in out - assert "Table has 0 rows" in out - client.delete_table(table, not_found_ok=True) diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py deleted file mode 100644 index 1610d0e4a3ba..000000000000 --- a/bigquery/samples/tests/test_list_datasets.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. 
import list_datasets - - -def test_list_datasets(capsys, dataset_id, client): - list_datasets.list_datasets() - out, err = capsys.readouterr() - assert "Datasets in project {}:".format(client.project) in out diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py deleted file mode 100644 index 5b375f4f4ee5..000000000000 --- a/bigquery/samples/tests/test_list_datasets_by_label.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import list_datasets_by_label - - -def test_list_datasets_by_label(capsys, dataset_id, client): - dataset = client.get_dataset(dataset_id) - dataset.labels = {"color": "green"} - dataset = client.update_dataset(dataset, ["labels"]) - list_datasets_by_label.list_datasets_by_label() - out, err = capsys.readouterr() - assert dataset_id in out diff --git a/bigquery/samples/tests/test_list_tables.py b/bigquery/samples/tests/test_list_tables.py deleted file mode 100644 index f9426aa53d21..000000000000 --- a/bigquery/samples/tests/test_list_tables.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import list_tables - - -def test_list_tables(capsys, dataset_id, table_id): - - list_tables.list_tables(dataset_id) - out, err = capsys.readouterr() - assert "Tables contained in '{}':".format(dataset_id) in out - assert table_id in out diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py deleted file mode 100644 index 2286660469ff..000000000000 --- a/bigquery/samples/tests/test_load_table_dataframe.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest - -from .. 
import load_table_dataframe - - -pandas = pytest.importorskip("pandas") -pyarrow = pytest.importorskip("pyarrow") - - -def test_load_table_dataframe(capsys, client, random_table_id): - - table = load_table_dataframe.load_table_dataframe(random_table_id) - out, _ = capsys.readouterr() - expected_column_names = [ - "wikidata_id", - "title", - "release_year", - "length_minutes", - "release_date", - "dvd_release", - ] - assert "Loaded 4 rows and {} columns".format(len(expected_column_names)) in out - - column_names = [field.name for field in table.schema] - assert column_names == expected_column_names - column_types = [field.field_type for field in table.schema] - assert column_types == [ - "STRING", - "STRING", - "INTEGER", - "FLOAT", - "TIMESTAMP", - "TIMESTAMP", - ] - - df = client.list_rows(table).to_dataframe() - df.sort_values("release_year", inplace=True) - assert df["title"].tolist() == [ - u"And Now for Something Completely Different", - u"Monty Python and the Holy Grail", - u"Life of Brian", - u"The Meaning of Life", - ] - assert df["release_year"].tolist() == [1971, 1975, 1979, 1983] - assert df["length_minutes"].tolist() == [88.0, 91.5, 94.25, 112.5] - assert df["release_date"].tolist() == [ - pandas.Timestamp("1971-09-28T22:59:07+00:00"), - pandas.Timestamp("1975-04-09T22:59:02+00:00"), - pandas.Timestamp("1979-08-18T03:59:05+00:00"), - pandas.Timestamp("1983-05-09T11:00:00+00:00"), - ] - assert df["dvd_release"].tolist() == [ - pandas.Timestamp("2003-10-22T10:00:00+00:00"), - pandas.Timestamp("2002-07-16T09:00:00+00:00"), - pandas.Timestamp("2008-01-14T08:00:00+00:00"), - pandas.Timestamp("2002-01-22T07:00:00+00:00"), - ] - assert df["wikidata_id"].tolist() == [u"Q16403", u"Q25043", u"Q24953", u"Q24980"] diff --git a/bigquery/samples/tests/test_load_table_file.py b/bigquery/samples/tests/test_load_table_file.py deleted file mode 100644 index a7ebe768201a..000000000000 --- a/bigquery/samples/tests/test_load_table_file.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -from google.cloud import bigquery - -from .. import load_table_file - - -def test_load_table_file(capsys, random_table_id, client): - - samples_test_dir = os.path.abspath(os.path.dirname(__file__)) - file_path = os.path.join( - samples_test_dir, "..", "..", "tests", "data", "people.csv" - ) - table = load_table_file.load_table_file(file_path, random_table_id) - - out, _ = capsys.readouterr() - assert "Loaded 2 rows and 2 columns" in out - - rows = list(client.list_rows(table)) # Make an API request. 
- assert len(rows) == 2 - # Order is not preserved, so compare individually - row1 = bigquery.Row(("Wylma Phlyntstone", 29), {"full_name": 0, "age": 1}) - assert row1 in rows - row2 = bigquery.Row(("Phred Phlyntstone", 32), {"full_name": 0, "age": 1}) - assert row2 in rows diff --git a/bigquery/samples/tests/test_load_table_uri_avro.py b/bigquery/samples/tests/test_load_table_uri_avro.py deleted file mode 100644 index 0be29d6b35ae..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_avro.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_avro - - -def test_load_table_uri_avro(capsys, random_table_id): - load_table_uri_avro.load_table_uri_avro(random_table_id) - out, _ = capsys.readouterr() - assert "Loaded 50 rows." in out diff --git a/bigquery/samples/tests/test_load_table_uri_cmek.py b/bigquery/samples/tests/test_load_table_uri_cmek.py deleted file mode 100644 index c15dad9a754f..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_cmek.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_cmek - - -def test_load_table_uri_cmek(capsys, random_table_id, kms_key_name): - - load_table_uri_cmek.load_table_uri_cmek(random_table_id, kms_key_name) - out, _ = capsys.readouterr() - assert "A table loaded with encryption configuration key" in out diff --git a/bigquery/samples/tests/test_load_table_uri_csv.py b/bigquery/samples/tests/test_load_table_uri_csv.py deleted file mode 100644 index fbcc69358466..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_csv.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_csv - - -def test_load_table_uri_csv(capsys, random_table_id): - - load_table_uri_csv.load_table_uri_csv(random_table_id) - out, _ = capsys.readouterr() - assert "Loaded 50 rows." 
in out diff --git a/bigquery/samples/tests/test_load_table_uri_json.py b/bigquery/samples/tests/test_load_table_uri_json.py deleted file mode 100644 index e054cb07ac3f..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_json.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_json - - -def test_load_table_uri_json(capsys, random_table_id): - - load_table_uri_json.load_table_uri_json(random_table_id) - out, _ = capsys.readouterr() - assert "Loaded 50 rows." in out diff --git a/bigquery/samples/tests/test_load_table_uri_orc.py b/bigquery/samples/tests/test_load_table_uri_orc.py deleted file mode 100644 index 96dc72022b0a..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_orc.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_orc - - -def test_load_table_uri_orc(capsys, random_table_id): - - load_table_uri_orc.load_table_uri_orc(random_table_id) - out, _ = capsys.readouterr() - assert "Loaded 50 rows." in out diff --git a/bigquery/samples/tests/test_load_table_uri_parquet.py b/bigquery/samples/tests/test_load_table_uri_parquet.py deleted file mode 100644 index 81ba3fcef604..000000000000 --- a/bigquery/samples/tests/test_load_table_uri_parquet.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import load_table_uri_parquet - - -def test_load_table_uri_parquet(capsys, random_table_id): - - load_table_uri_parquet.load_table_uri_parquet(random_table_id) - out, _ = capsys.readouterr() - assert "Loaded 50 rows."
in out diff --git a/bigquery/samples/tests/test_model_samples.py b/bigquery/samples/tests/test_model_samples.py deleted file mode 100644 index ebefad846642..000000000000 --- a/bigquery/samples/tests/test_model_samples.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import delete_model -from .. import get_model -from .. import list_models -from .. import update_model - - -def test_model_samples(capsys, dataset_id, model_id): - """Since creating a model is a long operation, test all model samples in - the same test, following a typical end-to-end flow. - """ - get_model.get_model(model_id) - out, err = capsys.readouterr() - assert model_id in out - - list_models.list_models(dataset_id) - out, err = capsys.readouterr() - assert "Models contained in '{}':".format(dataset_id) in out - - update_model.update_model(model_id) - out, err = capsys.readouterr() - assert "This model was modified from a Python program." in out - - delete_model.delete_model(model_id) - out, err = capsys.readouterr() - assert "Deleted model '{}'.".format(model_id) in out diff --git a/bigquery/samples/tests/test_query_external_gcs_temporary_table.py b/bigquery/samples/tests/test_query_external_gcs_temporary_table.py deleted file mode 100644 index 022b327be21b..000000000000 --- a/bigquery/samples/tests/test_query_external_gcs_temporary_table.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import query_external_gcs_temporary_table - - -def test_query_external_gcs_temporary_table(capsys,): - - query_external_gcs_temporary_table.query_external_gcs_temporary_table() - out, err = capsys.readouterr() - assert "There are 4 states with names starting with W." in out diff --git a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py deleted file mode 100644 index a00930cad881..000000000000 --- a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import query_external_sheets_permanent_table - - -def test_query_external_sheets_permanent_table(capsys, dataset_id): - - query_external_sheets_permanent_table.query_external_sheets_permanent_table( - dataset_id - ) - out, err = capsys.readouterr() - assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py deleted file mode 100644 index 8274787cb644..000000000000 --- a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import query_external_sheets_temporary_table - - -def test_query_external_sheets_temporary_table(capsys): - - query_external_sheets_temporary_table.query_external_sheets_temporary_table() - out, err = capsys.readouterr() - assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_no_cache.py b/bigquery/samples/tests/test_query_no_cache.py deleted file mode 100644 index df17d0d0b04f..000000000000 --- a/bigquery/samples/tests/test_query_no_cache.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re - -from .. import query_no_cache - - -def test_query_no_cache(capsys,): - - query_no_cache.query_no_cache() - out, err = capsys.readouterr() - assert re.search(r"(Row[\w(){}:', ]+)$", out) diff --git a/bigquery/samples/tests/test_query_pagination.py b/bigquery/samples/tests/test_query_pagination.py deleted file mode 100644 index 7ab049c8ce7c..000000000000 --- a/bigquery/samples/tests/test_query_pagination.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import query_pagination - - -def test_query_pagination(capsys,): - - query_pagination.query_pagination() - out, _ = capsys.readouterr() - assert "The query data:" in out - assert "name=James, count=4942431" in out diff --git a/bigquery/samples/tests/test_query_script.py b/bigquery/samples/tests/test_query_script.py deleted file mode 100644 index 037664d369ee..000000000000 --- a/bigquery/samples/tests/test_query_script.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import query_script - - -def test_query_script(capsys,): - - query_script.query_script() - out, _ = capsys.readouterr() - assert "Script created 2 child jobs." in out - assert ( - "53 of the top 100 names from year 2000 also appear in Shakespeare's works." - in out - ) - assert "produced 53 row(s)" in out - assert "produced 1 row(s)" in out diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py deleted file mode 100644 index 77d3f7130305..000000000000 --- a/bigquery/samples/tests/test_query_to_arrow.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pyarrow - -from .. import query_to_arrow - - -def test_query_to_arrow(capsys,): - - arrow_table = query_to_arrow.query_to_arrow() - out, err = capsys.readouterr() - assert "Downloaded 8 rows, 2 columns." in out - arrow_schema = arrow_table.schema - assert arrow_schema.names == ["race", "participant"] - assert pyarrow.types.is_string(arrow_schema.types[0]) - assert pyarrow.types.is_struct(arrow_schema.types[1]) diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_routine_samples.py deleted file mode 100644 index a4467c59a896..000000000000 --- a/bigquery/samples/tests/test_routine_samples.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery -from google.cloud import bigquery_v2 - - -def test_create_routine(capsys, random_routine_id): - from .. import create_routine - - create_routine.create_routine(random_routine_id) - out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out - - -def test_create_routine_ddl(capsys, random_routine_id, client): - from .. import create_routine_ddl - - create_routine_ddl.create_routine_ddl(random_routine_id) - routine = client.get_routine(random_routine_id) - out, err = capsys.readouterr() - - assert "Created routine {}".format(random_routine_id) in out - assert routine.type_ == "SCALAR_FUNCTION" - assert routine.language == "SQL" - expected_arguments = [ - bigquery.RoutineArgument( - name="arr", - data_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.ARRAY, - array_element_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.STRUCT, - struct_type=bigquery_v2.types.StandardSqlStructType( - fields=[ - bigquery_v2.types.StandardSqlField( - name="name", - type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.STRING - ), - ), - bigquery_v2.types.StandardSqlField( - name="val", - type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ), - ), - ] - ), - ), - ), - ) - ] - assert routine.arguments == expected_arguments - - -def test_list_routines(capsys, dataset_id, routine_id): - from .. import list_routines - - list_routines.list_routines(dataset_id) - out, err = capsys.readouterr() - assert "Routines contained in dataset {}:".format(dataset_id) in out - assert routine_id in out - - -def test_get_routine(capsys, routine_id): - from .. import get_routine - - get_routine.get_routine(routine_id) - out, err = capsys.readouterr() - assert "Routine '{}':".format(routine_id) in out - assert "Type: 'SCALAR_FUNCTION'" in out - assert "Language: 'SQL'" in out - assert "Name: 'x'" in out - assert "Type: 'type_kind: INT64\n'" in out - - -def test_delete_routine(capsys, routine_id): - from .. import delete_routine - - delete_routine.delete_routine(routine_id) - out, err = capsys.readouterr() - assert "Deleted routine {}.".format(routine_id) in out - - -def test_update_routine(routine_id): - from .. import update_routine - - routine = update_routine.update_routine(routine_id) - assert routine.body == "x * 4" diff --git a/bigquery/samples/tests/test_table_exists.py b/bigquery/samples/tests/test_table_exists.py deleted file mode 100644 index d1f579a64528..000000000000 --- a/bigquery/samples/tests/test_table_exists.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import table_exists - - -def test_table_exists(capsys, random_table_id, client): - - table_exists.table_exists(random_table_id) - out, err = capsys.readouterr() - assert "Table {} is not found.".format(random_table_id) in out - table = bigquery.Table(random_table_id) - table = client.create_table(table) - table_exists.table_exists(random_table_id) - out, err = capsys.readouterr() - assert "Table {} already exists.".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_table_insert_rows.py b/bigquery/samples/tests/test_table_insert_rows.py deleted file mode 100644 index 72b51df9c485..000000000000 --- a/bigquery/samples/tests/test_table_insert_rows.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import table_insert_rows - - -def test_table_insert_rows(capsys, random_table_id, client): - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - table = bigquery.Table(random_table_id, schema=schema) - table = client.create_table(table) - - table_insert_rows.table_insert_rows(random_table_id) - out, err = capsys.readouterr() - assert "New rows have been added." in out diff --git a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py deleted file mode 100644 index c6199894a72c..000000000000 --- a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. 
import table_insert_rows_explicit_none_insert_ids as mut - - -def test_table_insert_rows_explicit_none_insert_ids(capsys, random_table_id, client): - - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - table = bigquery.Table(random_table_id, schema=schema) - table = client.create_table(table) - - mut.table_insert_rows_explicit_none_insert_ids(random_table_id) - out, err = capsys.readouterr() - assert "New rows have been added." in out diff --git a/bigquery/samples/tests/test_undelete_table.py b/bigquery/samples/tests/test_undelete_table.py deleted file mode 100644 index a070abdbd36b..000000000000 --- a/bigquery/samples/tests/test_undelete_table.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import undelete_table - - -def test_undelete_table(capsys, table_with_schema_id, random_table_id): - undelete_table.undelete_table(table_with_schema_id, random_table_id) - out, _ = capsys.readouterr() - assert ( - "Copied data from deleted table {} to {}".format( - table_with_schema_id, random_table_id - ) - in out - ) diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py deleted file mode 100644 index 4c0aa835baf0..000000000000 --- a/bigquery/samples/tests/test_update_dataset_access.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import update_dataset_access - - -def test_update_dataset_access(capsys, dataset_id): - - update_dataset_access.update_dataset_access(dataset_id) - out, err = capsys.readouterr() - assert ( - "Updated dataset '{}' with modified user permissions.".format(dataset_id) in out - ) diff --git a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py deleted file mode 100644 index a5a8e6b5202c..000000000000 --- a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import update_dataset_default_partition_expiration - - -def test_update_dataset_default_partition_expiration(capsys, dataset_id): - - ninety_days_ms = 90 * 24 * 60 * 60 * 1000 # in milliseconds - - update_dataset_default_partition_expiration.update_dataset_default_partition_expiration( - dataset_id - ) - out, _ = capsys.readouterr() - assert ( - "Updated dataset {} with new default partition expiration {}".format( - dataset_id, ninety_days_ms - ) - in out - ) diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py deleted file mode 100644 index b0f7013228e6..000000000000 --- a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import update_dataset_default_table_expiration - - -def test_update_dataset_default_table_expiration(capsys, dataset_id): - - one_day_ms = 24 * 60 * 60 * 1000 # in milliseconds - - update_dataset_default_table_expiration.update_dataset_default_table_expiration( - dataset_id - ) - out, err = capsys.readouterr() - assert ( - "Updated dataset {} with new expiration {}".format(dataset_id, one_day_ms) - in out - ) diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py deleted file mode 100644 index e4ff586c7bc2..000000000000 --- a/bigquery/samples/tests/test_update_dataset_description.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import update_dataset_description - - -def test_update_dataset_description(capsys, dataset_id): - - update_dataset_description.update_dataset_description(dataset_id) - out, err = capsys.readouterr() - assert "Updated description." 
in out diff --git a/bigquery/samples/tests/test_update_table_require_partition_filter.py b/bigquery/samples/tests/test_update_table_require_partition_filter.py deleted file mode 100644 index 7e9ca6f2b44f..000000000000 --- a/bigquery/samples/tests/test_update_table_require_partition_filter.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.cloud import bigquery - -from .. import update_table_require_partition_filter - - -def test_update_table_require_partition_filter(capsys, random_table_id, client): - - # Make a partitioned table. - schema = [bigquery.SchemaField("transaction_timestamp", "TIMESTAMP")] - table = bigquery.Table(random_table_id, schema=schema) - table.time_partitioning = bigquery.TimePartitioning(field="transaction_timestamp") - table = client.create_table(table) - - update_table_require_partition_filter.update_table_require_partition_filter( - random_table_id - ) - out, _ = capsys.readouterr() - assert ( - "Updated table '{}' with require_partition_filter=True".format(random_table_id) - in out - ) diff --git a/bigquery/samples/undelete_table.py b/bigquery/samples/undelete_table.py deleted file mode 100644 index 18b15801ffee..000000000000 --- a/bigquery/samples/undelete_table.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google.api_core import datetime_helpers - - -def undelete_table(table_id, recovered_table_id): - # [START bigquery_undelete_table] - import time - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Choose a table to recover. - # table_id = "your-project.your_dataset.your_table" - - # TODO(developer): Choose a new table ID for the recovered table data. - # recovery_table_id = "your-project.your_dataset.your_table_recovered" - - # TODO(developer): Choose an appropriate snapshot point as epoch - # milliseconds. For this example, we choose the current time as we're about - # to delete the table immediately afterwards. - snapshot_epoch = int(time.time() * 1000) - - # [START_EXCLUDE] - # Due to very short lifecycle of the table, ensure we're not picking a time - # prior to the table creation due to time drift between backend and client. 
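- # BigQuery addresses a table's history with a snapshot decorator of the
- # form "table@<epoch_ms>"; clamping snapshot_epoch below to the creation
- # time keeps the decorator pointing at a moment when the table existed.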
- table = client.get_table(table_id) - created_epoch = datetime_helpers.to_milliseconds(table.created) - if created_epoch > snapshot_epoch: - snapshot_epoch = created_epoch - # [END_EXCLUDE] - - # "Accidentally" delete the table. - client.delete_table(table_id) # Make an API request. - - # Construct the restore-from table ID using a snapshot decorator. - snapshot_table_id = "{}@{}".format(table_id, snapshot_epoch) - - # Construct and run a copy job. - job = client.copy_table( - snapshot_table_id, - recovered_table_id, - # Must match the source and destination tables location. - location="US", - ) # Make an API request. - - job.result() # Wait for the job to complete. - - print( - "Copied data from deleted table {} to {}".format(table_id, recovered_table_id) - ) - # [END bigquery_undelete_table] diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py deleted file mode 100644 index 6e844cc90799..000000000000 --- a/bigquery/samples/update_dataset_access.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def update_dataset_access(dataset_id): - - # [START bigquery_update_dataset_access] - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = 'your-project.your_dataset' - - dataset = client.get_dataset(dataset_id) # Make an API request. - - entry = bigquery.AccessEntry( - role="READER", - entity_type="userByEmail", - entity_id="sample.bigquery.dev@gmail.com", - ) - - entries = list(dataset.access_entries) - entries.append(entry) - dataset.access_entries = entries - - dataset = client.update_dataset(dataset, ["access_entries"]) # Make an API request. - - full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) - print( - "Updated dataset '{}' with modified user permissions.".format(full_dataset_id) - ) - # [END bigquery_update_dataset_access] diff --git a/bigquery/samples/update_dataset_default_partition_expiration.py b/bigquery/samples/update_dataset_default_partition_expiration.py deleted file mode 100644 index 18cfb92db9b4..000000000000 --- a/bigquery/samples/update_dataset_default_partition_expiration.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
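As a complement to the update_dataset_access sample above, a minimal sketch of revoking the same grant again; it assumes the sample's entity_id and a placeholder dataset ID:

    from google.cloud import bigquery

    client = bigquery.Client()
    dataset = client.get_dataset("your-project.your_dataset")  # Make an API request.

    # Keep every entry except the READER grant added by the sample.
    dataset.access_entries = [
        entry
        for entry in dataset.access_entries
        if entry.entity_id != "sample.bigquery.dev@gmail.com"
    ]

    dataset = client.update_dataset(dataset, ["access_entries"])  # Make an API request.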
- - -def update_dataset_default_partition_expiration(dataset_id): - - # [START bigquery_update_dataset_partition_expiration] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = 'your-project.your_dataset' - - dataset = client.get_dataset(dataset_id) # Make an API request. - - # Set the default partition expiration (applies to new tables, only) in - # milliseconds. This example sets the default expiration to 90 days. - dataset.default_partition_expiration_ms = 90 * 24 * 60 * 60 * 1000 - - dataset = client.update_dataset( - dataset, ["default_partition_expiration_ms"] - ) # Make an API request. - - print( - "Updated dataset {}.{} with new default partition expiration {}".format( - dataset.project, dataset.dataset_id, dataset.default_partition_expiration_ms - ) - ) - # [END bigquery_update_dataset_partition_expiration] diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py deleted file mode 100644 index b7e5cea9b20d..000000000000 --- a/bigquery/samples/update_dataset_default_table_expiration.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def update_dataset_default_table_expiration(dataset_id): - - # [START bigquery_update_dataset_expiration] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = 'your-project.your_dataset' - - dataset = client.get_dataset(dataset_id) # Make an API request. - dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # In milliseconds. - - dataset = client.update_dataset( - dataset, ["default_table_expiration_ms"] - ) # Make an API request. - - full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) - print( - "Updated dataset {} with new expiration {}".format( - full_dataset_id, dataset.default_table_expiration_ms - ) - ) - # [END bigquery_update_dataset_expiration] diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py deleted file mode 100644 index 0732b1c618e8..000000000000 --- a/bigquery/samples/update_dataset_description.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
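The two expiration samples above express dataset-level defaults in integer milliseconds (90 * 24 * 60 * 60 * 1000 and 24 * 60 * 60 * 1000). A hedged sketch of undoing them, assuming that assigning None clears an optional dataset default as it does for other optional dataset properties:

    from google.cloud import bigquery

    client = bigquery.Client()
    dataset = client.get_dataset("your-project.your_dataset")  # Make an API request.

    # None removes the dataset-level defaults; existing tables keep whatever
    # expiration they already have.
    dataset.default_partition_expiration_ms = None
    dataset.default_table_expiration_ms = None

    dataset = client.update_dataset(
        dataset, ["default_partition_expiration_ms", "default_table_expiration_ms"]
    )  # Make an API request.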
- - -def update_dataset_description(dataset_id): - - # [START bigquery_update_dataset_description] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set dataset_id to the ID of the dataset to fetch. - # dataset_id = 'your-project.your_dataset' - - dataset = client.get_dataset(dataset_id) # Make an API request. - dataset.description = "Updated description." - dataset = client.update_dataset(dataset, ["description"]) # Make an API request. - - full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) - print( - "Updated dataset '{}' with description '{}'.".format( - full_dataset_id, dataset.description - ) - ) - # [END bigquery_update_dataset_description] diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py deleted file mode 100644 index db262d8cc43c..000000000000 --- a/bigquery/samples/update_model.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def update_model(model_id): - """Sample ID: go/samples-tracker/1533""" - - # [START bigquery_update_model_description] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set model_id to the ID of the model to fetch. - # model_id = 'your-project.your_dataset.your_model' - - model = client.get_model(model_id) # Make an API request. - model.description = "This model was modified from a Python program." - model = client.update_model(model, ["description"]) # Make an API request. - - full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) - print( - "Updated model '{}' with description '{}'.".format( - full_model_id, model.description - ) - ) - # [END bigquery_update_model_description] diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py deleted file mode 100644 index 61c6855b5041..000000000000 --- a/bigquery/samples/update_routine.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def update_routine(routine_id): - - # [START bigquery_update_routine] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set the fully-qualified ID for the routine. 
- # routine_id = "my-project.my_dataset.my_routine" - - routine = client.get_routine(routine_id) - - routine.body = "x * 4" - - routine = client.update_routine( - routine, - [ - "body", - # Due to a limitation of the API, - # all fields are required, not just - # those that have been updated. - "arguments", - "language", - "type_", - "return_type", - ], - ) # Make an API request. - # [END bigquery_update_routine] - return routine diff --git a/bigquery/samples/update_table_require_partition_filter.py b/bigquery/samples/update_table_require_partition_filter.py deleted file mode 100644 index cf1d532774b2..000000000000 --- a/bigquery/samples/update_table_require_partition_filter.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def update_table_require_partition_filter(table_id): - - # [START bigquery_update_table_require_partition_filter] - - from google.cloud import bigquery - - # Construct a BigQuery client object. - client = bigquery.Client() - - # TODO(developer): Set table_id to the ID of the model to fetch. - # table_id = 'your-project.your_dataset.your_table' - - table = client.get_table(table_id) # Make an API request. - table.require_partition_filter = True - table = client.update_table(table, ["require_partition_filter"]) - - # View table properties - print( - "Updated table '{}.{}.{}' with require_partition_filter={}.".format( - table.project, - table.dataset_id, - table.table_id, - table.require_partition_filter, - ) - ) - # [END bigquery_update_table_require_partition_filter] diff --git a/bigquery/setup.cfg b/bigquery/setup.cfg deleted file mode 100644 index 2a9acf13daa9..000000000000 --- a/bigquery/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal = 1 diff --git a/bigquery/setup.py b/bigquery/setup.py deleted file mode 100644 index 378c4fc1b4ce..000000000000 --- a/bigquery/setup.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - - -# Package metadata. 
- -name = "google-cloud-bigquery" -description = "Google BigQuery API client library" -version = "1.24.0" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" -dependencies = [ - 'enum34; python_version < "3.4"', - "google-auth >= 1.9.0, < 2.0dev", - "google-api-core >= 1.15.0, < 2.0dev", - "google-cloud-core >= 1.1.0, < 2.0dev", - "google-resumable-media >= 0.5.0, < 0.6dev", - "protobuf >= 3.6.0", - "six >=1.13.0,< 2.0.0dev", -] -extras = { - "bqstorage": [ - "google-cloud-bigquery-storage >= 0.6.0, <2.0.0dev", - # Bad Linux release for 0.14.0. - # https://issues.apache.org/jira/browse/ARROW-5868 - "pyarrow>=0.13.0, != 0.14.0", - ], - "pandas": ["pandas>=0.17.1"], - # Exclude PyArrow dependency from Windows Python 2.7. - 'pyarrow: platform_system != "Windows" or python_version >= "3.4"': [ - # Bad Linux release for 0.14.0. - # https://issues.apache.org/jira/browse/ARROW-5868 - "pyarrow>=0.4.1, != 0.14.0" - ], - "tqdm": ["tqdm >= 4.0.0, <5.0.0dev"], - "fastparquet": ["fastparquet", "python-snappy"], -} - -all_extras = [] - -for extra in extras: - if extra == "fastparquet": - # Skip fastparquet from "all" because it is redundant with pyarrow and - # creates a dependency on pre-release versions of numpy. See: - # https://github.com/googleapis/google-cloud-python/issues/8549 - continue - all_extras.extend(extras[extra]) - -extras["all"] = all_extras - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. 
-namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/bigquery/synth.metadata b/bigquery/synth.metadata deleted file mode 100644 index ef9fc79c57d3..000000000000 --- a/bigquery/synth.metadata +++ /dev/null @@ -1,770 +0,0 @@ -{ - "updateTime": "2020-01-29T13:17:11.693204Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "cf3b61102ed5f36b827bc82ec39be09525f018c8", - "internalRef": "292034635", - "log": "cf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the 
location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 
289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command 
Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n08b488e0660c59842a7dee0e3e2b65d9e3a514a9\nExposing cloud_catalog.proto (This API is already available through REST)\n\nPiperOrigin-RevId: 288625007\n\na613482977e11ac09fa47687a5d1b5a01efcf794\nUpdate the OS Login v1beta API description to render better in the UI.\n\nPiperOrigin-RevId: 288547940\n\n5e182b8d9943f1b17008d69d4c7e865dc83641a7\nUpdate the OS Login API description to render better in the UI.\n\nPiperOrigin-RevId: 288546443\n\ncb79155f596e0396dd900da93872be7066f6340d\nFix: Add a resource annotation for Agent\nFix: Correct the service name in annotations for Intent and SessionEntityType\n\nPiperOrigin-RevId: 288441307\n\nf7f6e9daec3315fd47cb638789bd8415bf4a27cc\nAdded cloud asset api v1p1beta1\n\nPiperOrigin-RevId: 288427239\n\nf2880f5b342c6345f3dcaad24fcb3c6ca9483654\nBilling account API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 288351810\n\ndc250ffe071729f8f8bef9d6fd0fbbeb0254c666\nFix: Remove incorrect resource annotations in requests\n\nPiperOrigin-RevId: 288321208\n\n91ef2d9dd69807b0b79555f22566fb2d81e49ff9\nAdd GAPIC annotations to Cloud KMS (but do not migrate the GAPIC config yet).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 287999179\n\n4d45a6399e9444fbddaeb1c86aabfde210723714\nRefreshing Cloud Billing API protos.\n\nThis exposes the following API methods:\n- UpdateBillingAccount\n- CreateBillingAccount\n- GetIamPolicy\n- SetIamPolicy\n- TestIamPermissions\n\nThere are also some new fields to support the management of sub-accounts.\n\nPiperOrigin-RevId: 287908369\n\nec285d3d230810147ebbf8d5b691ee90320c6d2d\nHide not yet implemented update_transforms message\n\nPiperOrigin-RevId: 287608953\n\na202fb3b91cd0e4231be878b0348afd17067cbe2\nBigQuery Storage Write API v1alpha2 clients. 
The service is enabled by whitelist only.\n\nPiperOrigin-RevId: 287379998\n\n650d7f1f8adb0cfaf37b3ce2241c3168f24efd4d\nUpdate Readme.md to match latest Bazel updates\n090d98aea20270e3be4b64240775588f7ce50ff8\ndocs(bigtable): Fix library release level listed in generated documentation\n\nPiperOrigin-RevId: 287308849\n\n2c28f646ca77b1d57550368be22aa388adde2e66\nfirestore: retry reads that fail with contention\n\nPiperOrigin-RevId: 287250665\n\nfd3091fbe9b2083cabc53dc50c78035658bfc4eb\nSync timeout in grpc config back to 10s for tasks API with github googelapis gapic config.\n\nPiperOrigin-RevId: 287207067\n\n49dd7d856a6f77c0cf7e5cb3334423e5089a9e8a\nbazel: Integrate bazel-2.0.0 compatibility fixes\n\nPiperOrigin-RevId: 287205644\n\n46e52fd64973e815cae61e78b14608fe7aa7b1df\nbazel: Integrate bazel build file generator\n\nTo generate/update BUILD.bazel files for any particular client or a batch of clients:\n```\nbazel run //:build_gen -- --src=google/example/library\n```\n\nPiperOrigin-RevId: 286958627\n\n1a380ea21dea9b6ac6ad28c60ad96d9d73574e19\nBigQuery Storage Read API v1beta2 clients.\n\nPiperOrigin-RevId: 286616241\n\n5f3f1d0f1c06b6475a17d995e4f7a436ca67ec9e\nAdd Artman config for secretmanager.\n\nPiperOrigin-RevId: 286598440\n\n50af0530730348f1e3697bf3c70261f7daaf2981\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 286491002\n\n91818800384f4ed26961aea268910b1a2ec58cc8\nFor Data Catalog API,\n1. Add support for marking a tag template field as required when creating a new tag template.\n2. Add support for updating a tag template field from required to optional.\n\nPiperOrigin-RevId: 286490262\n\nff4a2047b3d66f38c9b22197c370ed0d02fc0238\nWeekly library update.\n\nPiperOrigin-RevId: 286484215\n\n192c14029861752a911ed434fd6ee5b850517cd9\nWeekly library update.\n\nPiperOrigin-RevId: 286484165\n\nd9e328eaf790d4e4346fbbf32858160f497a03e0\nFix bazel build (versions 1.x)\n\nBump gapic-generator and resource names plugins to the latest version.\n\nPiperOrigin-RevId: 286469287\n\n0ca305403dcc50e31ad9477c9b6241ddfd2056af\nsecretmanager client package name option updates for java and go\n\nPiperOrigin-RevId: 286439553\n\nade4803e8a1a9e3efd249c8c86895d2f12eb2aaa\niam credentials: publish v1 protos containing annotations\n\nPiperOrigin-RevId: 286418383\n\n03e5708e5f8d1909dcb74b25520309e59ebf24be\nsecuritycenter: add missing proto deps for Bazel build\n\nPiperOrigin-RevId: 286417075\n\n8b991eb3eb82483b0ca1f1361a9c8e5b375c4747\nAdd secretmanager client package name options.\n\nPiperOrigin-RevId: 286415883\n\nd400cb8d45df5b2ae796b909f098a215b2275c1d\ndialogflow: add operation_info annotations to BatchUpdateEntities and BatchDeleteEntities.\n\nPiperOrigin-RevId: 286312673\n\nf2b25232db397ebd4f67eb901a2a4bc99f7cc4c6\nIncreased the default timeout time for all the Cloud Security Command Center client libraries.\n\nPiperOrigin-RevId: 286263771\n\ncb2f1eefd684c7efd56fd375cde8d4084a20439e\nExposing new Resource fields in the SecurityCenterProperties proto, added more comments to the filter logic for these Resource fields, and updated the response proto for the ListFindings API with the new Resource fields.\n\nPiperOrigin-RevId: 286263092\n\n73cebb20432b387c3d8879bb161b517d60cf2552\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 286261392\n\n1b4e453d51c0bd77e7b73896cdd8357d62768d83\nsecuritycenter: publish v1beta1 protos with annotations\n\nPiperOrigin-RevId: 286228860\n\na985eeda90ae98e8519d2320bee4dec148eb8ccb\nAdd default retry configurations for 
speech_v1p1beta1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 286191318\n\n3352100a15ede383f5ab3c34599f7a10a3d066fe\nMake importing rule with the same name (but different aliases) from different repositories possible.\n\nThis is needed to allow monolitic gapic-generator and microgenerators coexist during transition period.\n\nTo plug a microgenerator:\n\n1) Add corresponding rules bidnings under `switched_rules_by_language` in repository_rules.bzl:\n rules[\"go_gapic_library2\"] = _switch(\n go and grpc and gapic,\n \"@gapic_generator_go//rules_go_gapic/go_gapic.bzl\",\n \"go_gapic_library\",\n )\n\n2) Import microgenerator in WORKSPACE (the above example assumes that the generator was imported under name \"gapic_generator_go\").\n\n3) To migrate an API from monolith to micro generator (this is done per API and per language) modify the corresponding load statement in the API's BUILD.bazel file. For example, for the example above, to migrate to go microgenerator modify the go-specific load statement in BUILD.bazel file of a specific API (which you want to migrate) to the following:\n\nload(\n \"@com_google_googleapis_imports//:imports.bzl\",\n \"go_gapic_assembly_pkg\",\n go_gapic_library = \"go_gapic_library2\",\n \"go_proto_library\",\n \"go_test\",\n)\n\nPiperOrigin-RevId: 286065440\n\n6ad2bb13bc4b0f3f785517f0563118f6ca52ddfd\nUpdated v1beta1 protos for the client:\n- added support for GenericSignedAttestation which has a generic Signature\n- added support for CVSSv3 and WindowsDetail in Vulnerability\n- documentation updates\n\nPiperOrigin-RevId: 286008145\n\nfe1962e49999a832eed8162c45f23096336a9ced\nAdMob API v1 20191210\n\nBasic account info, mediation and network report available. See https://developers.google.com/admob/api/release-notes for more details.\n\nPiperOrigin-RevId: 285894502\n\n41fc1403738b61427f3a798ca9750ef47eb9c0f2\nAnnotate the required fields for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285824386\n\n27d0e0f202cbe91bf155fcf36824a87a5764ef1e\nRemove inappropriate resource_reference annotations for UpdateWorkflowTemplateRequest.template.\n\nPiperOrigin-RevId: 285802643\n\ne5c4d3a2b5b5bef0a30df39ebb27711dc98dee64\nAdd Artman BUILD.bazel file for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285445602\n\n2085a0d3c76180ee843cf2ecef2b94ca5266be31\nFix path in the artman config for Monitoring Dashboard API.\n\nPiperOrigin-RevId: 285233245\n\n2da72dfe71e4cca80902f9e3e125c40f02c2925b\nAdd Artman and GAPIC configs for the Monitoring Dashboards API.\n\nPiperOrigin-RevId: 285211544\n\n9f6eeebf1f30f51ffa02acea5a71680fe592348e\nAdd annotations to Dataproc v1. 
(Also forwarding comment changes from internal source control.)\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 285197557\n\n19c4589a3cb44b3679f7b3fba88365b3d055d5f8\noslogin: fix v1beta retry configuration\n\nPiperOrigin-RevId: 285013366\n\nee3f02926d0f8a0bc13f8d716581aad20f575751\nAdd Monitoring Dashboards API protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 284982647\n\ne47fdd266542386e5e7346697f90476e96dc7ee8\nbigquery datatransfer: Remove non-publicly available DataSourceService.\n\nPiperOrigin-RevId: 284822593\n\n6156f433fd1d9d5e4a448d6c6da7f637921d92ea\nAdds OSConfig v1beta protos and initial client library config\n\nPiperOrigin-RevId: 284799663\n\n6cc9499e225a4f6a5e34fe07e390f67055d7991c\nAdd datetime.proto to google/type/BUILD.bazel\n\nPiperOrigin-RevId: 284643689\n\nfe7dd5277e39ffe0075729c61e8d118d7527946d\nCosmetic changes to proto comment as part of testing internal release instructions.\n\nPiperOrigin-RevId: 284608712\n\n68d109adad726b89f74276d2f4b2ba6aac6ec04a\nAdd annotations to securitycenter v1, but leave GAPIC v1 in place.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 284580511\n\ndf8a1707a910fc17c71407a75547992fd1864c51\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 284568564\n\na69a974976221ce3bb944901b739418b85d6408c\nclient library update\n\nPiperOrigin-RevId: 284463979\n\na4adac3a12aca6e3a792c9c35ee850435fe7cf7e\nAdded DateTime, TimeZone, and Month proto files to google/type\n\nPiperOrigin-RevId: 284277770\n\ned5dec392906078db4f7745fe4f11d34dd401ae9\nchange common resources from message-level annotations to file-level annotations.\n\nPiperOrigin-RevId: 284236794\n\na00e2c575ef1b637667b4ebe96b8c228b2ddb273\nbigquerydatatransfer: change resource type TransferRun to Run to be consistent with gapic configs\nbigquerydatatransfer: add missing patterns for DataSource, TransferConfig and Run (to allow the location segment)\nbigquerydatatransfer: add file-level Parent resource type (to allow the location segement)\nbigquerydatatransfer: update grpc service config with correct retry delays\n\nPiperOrigin-RevId: 284234378\n\nb10e4547017ca529ac8d183e839f3c272e1c13de\ncloud asset: replace required fields for batchgetassethistory. Correct the time out duration.\n\nPiperOrigin-RevId: 284059574\n\n6690161e3dcc3367639a2ec10db67bf1cf392550\nAdd default retry configurations for speech_v1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. 
See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 284035915\n\n9b2635ef91e114f0357bdb87652c26a8f59316d5\ncloudtasks: fix gapic v2 config\n\nPiperOrigin-RevId: 284020555\n\ne5676ba8b863951a8ed0bfd6046e1db38062743c\nReinstate resource name handling in GAPIC config for Asset v1.\n\nPiperOrigin-RevId: 283993903\n\nf337f7fb702c85833b7b6ca56afaf9a1bf32c096\nOSConfig AgentEndpoint: add LookupEffectiveGuestPolicy rpc\n\nPiperOrigin-RevId: 283989762\n\nc0ac9b55f2e2efd0ee525b3a6591a1b09330e55a\nInclude real time feed api into v1 version\n\nPiperOrigin-RevId: 283845474\n\n2427a3a0f6f4222315362d973d91a082a3a884a7\nfirestore admin: update v1 protos with annotations & retry config\n\nPiperOrigin-RevId: 283826605\n\n555e844dbe04af50a8f55fe1217fa9d39a0a80b2\nchore: publish retry configs for iam admin, cloud asset, and remoteworkers\n\nPiperOrigin-RevId: 283801979\n\n6311dc536668849142d1fe5cd9fc46da66d1f77f\nfirestore: update v1beta1 protos with annotations and retry config\n\nPiperOrigin-RevId: 283794315\n\nda0edeeef953b05eb1524d514d2e9842ac2df0fd\nfeat: publish several retry config files for client generation\n\nPiperOrigin-RevId: 283614497\n\n59a78053537e06190f02d0a7ffb792c34e185c5a\nRemoving TODO comment\n\nPiperOrigin-RevId: 283592535\n\n8463992271d162e2aff1d5da5b78db11f2fb5632\nFix bazel build\n\nPiperOrigin-RevId: 283589351\n\n3bfcb3d8df10dfdba58f864d3bdb8ccd69364669\nPublic client library for bebop_jobs_api_20191118_1_RC3 release.\n\nPiperOrigin-RevId: 283568877\n\n27ab0db61021d267c452b34d149161a7bf0d9f57\nfirestore: publish annotated protos and new retry config\n\nPiperOrigin-RevId: 283565148\n\n38dc36a2a43cbab4a2a9183a43dd0441670098a9\nfeat: add http annotations for operations calls\n\nPiperOrigin-RevId: 283384331\n\n366caab94906975af0e17822e372f1d34e319d51\ndatastore: add a legacy artman config for PHP generation\n\nPiperOrigin-RevId: 283378578\n\n82944da21578a53b74e547774cf62ed31a05b841\nMigrate container v1beta1 to GAPIC v2.\n\nPiperOrigin-RevId: 283342796\n\n584dcde5826dd11ebe222016b7b208a4e1196f4b\nRemove resource name annotation for UpdateKeyRequest.key, because it's the resource, not a name.\n\nPiperOrigin-RevId: 283167368\n\n6ab0171e3688bfdcf3dbc4056e2df6345e843565\nAdded resource annotation for Key message.\n\nPiperOrigin-RevId: 283066965\n\n86c1a2db1707a25cec7d92f8850cc915163ec3c3\nExpose Admin API methods for Key manipulation.\n\nPiperOrigin-RevId: 282988776\n\n3ddad085965896ffb205d44cb0c0616fe3def10b\nC++ targets: correct deps so they build, rename them from trace* to cloudtrace*\nto match the proto names.\n\nPiperOrigin-RevId: 282857635\n\ne9389365a971ad6457ceb9646c595e79dfdbdea5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 282810797\n\ne42eaaa9abed3c4d63d64f790bd3191448dbbca6\nPut back C++ targets for cloud trace v2 api.\n\nPiperOrigin-RevId: 282803841\n\nd8896a3d8a191702a9e39f29cf4c2e16fa05f76d\nAdd initial BUILD.bazel for secretmanager.googleapis.com\n\nPiperOrigin-RevId: 282674885\n\n2cc56cb83ea3e59a6364e0392c29c9e23ad12c3a\nCreate sample for list recommendations\n\nPiperOrigin-RevId: 282665402\n\nf88e2ca65790e3b44bb3455e4779b41de1bf7136\nbump Go to ga\n\nPiperOrigin-RevId: 282651105\n\naac86d932b3cefd7d746f19def6935d16d6235e0\nDocumentation update. 
Add location_id in preparation for regionalization.\n\nPiperOrigin-RevId: 282586371\n\n5b501cd384f6b842486bd41acce77854876158e7\nMigrate Datastore Admin to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282570874\n\n6a16d474d5be201b20a27646e2009c4dfde30452\nMigrate Datastore to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282564329\n\n74bd9b95ac8c70b883814e4765a725cffe43d77c\nmark Go lib ga\n\nPiperOrigin-RevId: 282562558\n\nf7b3d434f44f6a77cf6c37cae5474048a0639298\nAdd secretmanager.googleapis.com protos\n\nPiperOrigin-RevId: 282546399\n\nc34a911aaa0660a45f5a556578f764f135e6e060\niot: bump Go GAPIC to GA release level\n\nPiperOrigin-RevId: 282494787\n\n79b7f1c5ba86859dbf70aa6cd546057c1002cdc0\nPut back C++ targets.\nPrevious change overrode custom C++ targets made by external teams. This PR puts those targets back.\n\nPiperOrigin-RevId: 282458292\n\n06a840781d2dc1b0a28e03e30fb4b1bfb0b29d1e\nPopulate BAZEL.build files for around 100 APIs (all APIs we publish) in all 7 langauges.\n\nPiperOrigin-RevId: 282449910\n\n777b580a046c4fa84a35e1d00658b71964120bb0\nCreate BUILD file for recommender v1beta1\n\nPiperOrigin-RevId: 282068850\n\n48b385b6ef71dfe2596490ea34c9a9a434e74243\nGenerate recommender v1beta1 gRPC ServiceConfig file\n\nPiperOrigin-RevId: 282067795\n\n8395b0f1435a4d7ce8737b3b55392627758bd20c\nfix: Set timeout to 25s, because Tasks fails for any deadline above 30s.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282017295\n\n3ba7ddc4b2acf532bdfb0004ca26311053c11c30\nfix: Shift Ruby and PHP to legacy GAPIC YAMLs for back-compat.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281852671\n\nad6f0c002194c3ec6c13d592d911d122d2293931\nRemove unneeded yaml files\n\nPiperOrigin-RevId: 281835839\n\n1f42588e4373750588152cdf6f747de1cadbcbef\nrefactor: Migrate Tasks beta 2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769558\n\n902b51f2073e9958a2aba441f7f7ac54ea00966d\nrefactor: Migrate Tasks to GAPIC v2 (for real this time).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769522\n\n17561f59970eede87f61ef6e9c322fa1198a2f4d\nMigrate Tasks Beta 3 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769519\n\nf95883b15a1ddd58eb7e3583fdefe7b00505faa3\nRegenerate recommender v1beta1 protos and sanitized yaml\n\nPiperOrigin-RevId: 281765245\n\n9a52df54c626b36699a058013d1735a166933167\nadd gRPC ServiceConfig for grafeas v1\n\nPiperOrigin-RevId: 281762754\n\n7a79d682ef40c5ca39c3fca1c0901a8e90021f8a\nfix: Roll back Tasks GAPIC v2 while we investigate C# issue.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281758548\n\n3fc31491640a90f029f284289e7e97f78f442233\nMigrate Tasks to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281751187\n\n5bc0fecee454f857cec042fb99fe2d22e1bff5bc\nfix: adds operation HTTP rules back to v1p1beta1 config\n\nPiperOrigin-RevId: 281635572\n\n5364a19284a1333b3ffe84e4e78a1919363d9f9c\nbazel: Fix build\n\n1) Update to latest gapic-generator (has iam resource names fix for java).\n2) Fix non-trivial issues with oslogin (resources defined in sibling package to the one they are used from) and monitoring.\n3) Fix trivial missing dependencies in proto_library targets for other apis.\n\nThis is to prepare the repository to being populated with BUILD.bazel files for all supported apis (101 API) in all 7 languages.\n\nPiperOrigin-RevId: 281618750\n\n0aa77cbe45538d5e5739eb637db3f2940b912789\nUpdating common proto files in google/type/ with their latest versions.\n\nPiperOrigin-RevId: 
281603926\n\nd47e1b4485b3effbb2298eb10dd13a544c0f66dc\nfix: replace Speech Recognize RPC retry_codes_name for non-standard assignment\n\nPiperOrigin-RevId: 281594037\n\n16543773103e2619d2b5f52456264de5bb9be104\nRegenerating public protos for datacatalog, also adding gRPC service config.\n\nPiperOrigin-RevId: 281423227\n\n328ebe76adb06128d12547ed70107fb841aebf4e\nChange custom data type from String to google.protobuf.Struct to be consistent with other docs such as\nhttps://developers.google.com/actions/smarthome/develop/process-intents#response_format\n\nPiperOrigin-RevId: 281402467\n\n5af83f47b9656261cafcf88b0b3334521ab266b3\n(internal change without visible public changes)\n\nPiperOrigin-RevId: 281334391\n\nc53ed56649583a149382bd88d3c427be475b91b6\nFix typo in protobuf docs.\n\nPiperOrigin-RevId: 281293109\n\nd8dd7fe8d5304f7bd1c52207703d7f27d5328c5a\nFix build by adding missing deps.\n\nPiperOrigin-RevId: 281088257\n\n3ef5ffd7351809d75c1332d2eaad1f24d9c318e4\nMigrate Error Reporting v1beta1 to proto annotations / GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281075722\n\n418ee8e24a56b5959e1c1defa4b6c97f883be379\nTrace v2: Add remaining proto annotations, migrate to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281068859\n\nc89394342a9ef70acaf73a6959e04b943fbc817b\nThis change updates an outdated comment for the feature importance proto field since they are no longer in [0, 1] for online predictions.\n\nPiperOrigin-RevId: 280761373\n\n1ec8b8e2c3c8f41d7d2b22c594c025276d6a4ae6\nCode refactoring\n\nPiperOrigin-RevId: 280760149\n\n427a22b04039f93b769d89accd6f487413f667c1\nImport automl operation protos.\n\nPiperOrigin-RevId: 280703572\n\n45749a04dac104e986f6cc47da3baf7c8bb6f9b0\nfix: bigqueryconnection_gapic.yaml to reflect proto annotations\n\n* remove connection_credential resource\n* make CreateCredentialRequest.connection_id optional\n* shuffle field ordering in CreateCredential flattening\n\nPiperOrigin-RevId: 280685438\n\n8385366aa1e5d7796793db02a9c5e167d1fd8f17\nRevert the Trace v2 GAPIC for now.\nCommitter: @lukesneeringer\n\nPiperOrigin-RevId: 280669295\n\n5c8ab2c072d557c2f4c4e54b544394e2d62202d5\nMigrate Trace v1 and Trace v2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 280667429\n\nf6808ff4e8b966cd571e99279d4a2780ed97dff2\nRename the `endpoint_urls` field to `endpoint_uris` to be consistent with\nGoogle API nomenclature.\n\nPiperOrigin-RevId: 280581337\n\n1935fb8889686f5c9d107f11b3c6870fc3aa7cdc\nComment updates\n\nPiperOrigin-RevId: 280451656\n\n0797fd5b9029d630e68a0899734715d62ad38e33\nComment updates\n\nPiperOrigin-RevId: 280451600\n\n9bc8d07b8b749e791d16c8d559526928ceaf1994\nRollback of \"Migrate Cloud Error Reporting to proto annotations & GAPIC v2.\"\n\nPiperOrigin-RevId: 280445975\n\nf8720321aecf4aab42e03602ac2c67f9777d9170\nfix: bigtable retry config in GAPIC v2\n\nPiperOrigin-RevId: 280434856\n\nb11664ba64f92d96d748e0dd9724d006dcafd120\nMigrate Cloud Error Reporting to proto annotations & GAPIC v2.\n\nPiperOrigin-RevId: 280432937\n\n4f747bda9b099b4426f495985680d16d0227fa5f\n1. Change DataCatalog package name in java from com.google.cloud.datacatalog to com.google.cloud.datacatalog.v1beta1 (API version is included in the package). *This is a breaking change.*\n\n2. Add API for Taxonomies (PolicyTagManager and PolicyTagManagerSerialization services).\n\n3. 
Minor changes to documentation.\n\nPiperOrigin-RevId: 280394936\n\nbc76ffd87360ce1cd34e3a6eac28afd5e1efda76\nUse rules_proto bzl files to load proto_library\n\nThis makes googleapis forward compatible with Bazel incompatible change https://github.com/bazelbuild/bazel/issues/8922.\n\nThis CL was created by adding @rules_proto to the WORKSPACE file and then running:\n\nfind . -name BUILD.bazel | \\\n while read build; do \\\n buildifier --lint=fix --warnings=load $build; \\\n done\n\nSince buildifier cannot be told not to reformat the BUILD file, some files are reformatted.\n\nPiperOrigin-RevId: 280356106\n\n218164b3deba1075979c9dca5f71461379e42dd1\nMake the `permissions` argument in TestIamPermissions required.\n\nPiperOrigin-RevId: 280279014\n\ndec8fd8ea5dc464496606189ba4b8949188639c8\nUpdating Cloud Billing Budget API documentation for clarity.\n\nPiperOrigin-RevId: 280225437\n\na667ffab90deb5e2669eb40ec7b61ec96a3d0454\nIntroduced detailed status message for CreateTimeSeries: CreateTimeSeriesSummary replaces CreateTimeSeriesError, which is now deprecated and unused.\n\nPiperOrigin-RevId: 280221707\n\nbe0a25eceec8916633447a37af0ecea801b85186\nMigrate Bigtable API to GAPIC v2 config.\n\nPiperOrigin-RevId: 280199643\n\n88bbf96b90089994ed16208a0f38cdd07f743742\nFix location of monitoring.yaml in Artman config for monitoring v3.\n\nPiperOrigin-RevId: 280134477\n\ndbaa01a20303758eed0c5a95ad2239ea306ad9a5\nUpdate namespace for PHP.\n\nPiperOrigin-RevId: 280085199\n\nf73b3796a635b2026a590d5133af7fa1f0eb807b\nStandardize pub/sub client default settings across clients:\n- Add retry codes for streaming pull\n- Decrease publish's max_rpc_timeout (mini-timeout) from 10 mins to 1 min\n- Decrease publish's total timeout from 10 mins to 1 min\n- Increase publish batching threshold from 10 to 100 elements\n- Increase publish batching size threshold from 1 KiB to 1 MiB\n\nPiperOrigin-RevId: 280044012\n\n822172613e1d93bede3beaf78b123c42a5876e2b\nReplace local_repository with http_archive in WORKSPACE\n\nPiperOrigin-RevId: 280039052\n\n6a8c7914d1b79bd832b5157a09a9332e8cbd16d4\nAdded notification_supported_by_agent to indicate whether the agent is sending notifications to Google or not.\n\nPiperOrigin-RevId: 279991530\n\n675de3dc9ab98cc1cf54216ad58c933ede54e915\nAdd an endpoint_urls field to the instance admin proto and adds a field_mask field to the GetInstanceRequest.\n\nPiperOrigin-RevId: 279982263\n\nf69562be0608904932bdcfbc5ad8b9a22d9dceb8\nAdds some clarification to IAM Policy public proto comments about the policy versioning compliance check for etag-less SetIamPolicy requests.\n\nPiperOrigin-RevId: 279774957\n\n4e86b2538758e3155e867d1cb4155ee91de7c6e9\nDocumentation update. 
Add the new action for sending metrics to Stackdriver.\n\nPiperOrigin-RevId: 279768476\n\neafaf30b7a3af0bc72f323fe6a6827327d3cad75\nfix: Restore deleted field to avoid a breaking change.\n\nPiperOrigin-RevId: 279760458\n\ned13a73f3054a29b764f104feaa503820b75140a\nAdd GAPIC annotations to the GKE API.\n\nPiperOrigin-RevId: 279734275\n\n6b125955bf0d6377b96f205e5d187e9d524b7ea2\nUpdate timeouts to 1 hour for default and streaming RPCs.\n\nPiperOrigin-RevId: 279657866\n\n989b304c8a6cfe72bdd7cb264e0d71b784db9421\nAdd Service Monitoring (Service and ServiceLevelObjective) protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 279649144\n\n1ef3bed9594674bb571ce20418af307505e3f609\nUpdating configs for AgentEndpoint to fix the client library generation.\n\nPiperOrigin-RevId: 279518887\n\n34e661f58d58fa57da8ed113a3d8bb3de26b307d\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 279417429\n\n248abde06efb7e5a3d81b84de02c8272122b0c3b\nIntegrate GAPIC Python Bazel Extensions\n\nAlso configure python build for the following clients as an example:\n\ndiaglogflow/v2\nlanguage/v1\ntexttospeech/v1\nfirestore/v1beta1\npubsub/v1\n\nPiperOrigin-RevId: 279406526\n\n7ffbf721e29b8806e0c8947c5dd0cdddc02de72a\nOSConfig Agentendpoint: Rename ReportTaskStart to StartNextTask\n\nPiperOrigin-RevId: 279389774\n\n2642d8688bab8981c8a5153b7578f9ff8460a37c\nAgentendpoint API: minor doc updates, addition of exclusive_packages|patches to PatchConfigs.\n\nPiperOrigin-RevId: 279326626\n\nd323b287c782802242005072d15f1474d7d10819\nDocumentation changes.\n\nPiperOrigin-RevId: 279234903\n\n29927f71d92d59551a42272ab7c6e97e8413af78\nPublishing Billing Budgets v1alpha1 API.\n\nPiperOrigin-RevId: 279176561\n\nff413d36f8358818d76fa92006f2d8f608843093\nAdding gRPC service config for Billing Budgets API.\n\nPiperOrigin-RevId: 279175129\n\n3eb91187709cc96bb890c110f518505f65ffd95d\nagentendpoint: removes all gapic languages except Go from artman config\n\nPiperOrigin-RevId: 279173968\n\na34950f968c7944a1036551b545557edcc18c767\nFix bazel build.\n\nUpdate gapic-generator and protoc-java-resource-name plugin dependencies to the latest versions.\n\nThe following clients remain broken because of bugs in gapic-generator and/or corresponding configs\n\ngoogle/cloud/iot/v1\ngoogle/cloud/oslogin/v1\ngoogle/spanner/admin/instance/v1\ngoogle/cloud/oslogin/v1\n\nPiperOrigin-RevId: 279171061\n\n0ed34e9fdf601dfc37eb24c40e17495b86771ff4\nAdds agentendpoint protos and initial client library config\n\nPiperOrigin-RevId: 279147036\n\ncad1d3b365a90c2a9f014b84a2a1acb55c15480f\nUpdates to MediaCard\n\nPiperOrigin-RevId: 279100776\n\n05556c26b633c153f2eca62aeafbcd62705f41b7\nUpdates to MediaCard\n\nPiperOrigin-RevId: 279100278\n\n2275670a746ab2bc03ebba0d914b45320ea15af4\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278922329\n\n5691fcb7c1a926b52577aa1834f31d9c50efda54\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278731899\n\ncb542d6f5f1c9431ec4181d9cfd7f8d8c953e60b\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278688708\n\n311e73f017a474c9a41f2a41b00d5d704ff191c5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278658917\n\n521ce65c04266df83dde9e2cfd8b2caf057cab45\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278656745\n\nf06bab1c11b7a6dcd15c50525da44c4b2ff3ef3d\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278627678\n\n8c6569ced063c08a48272de2e887860d0c40d388\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 
278552094\n\n21262f41c4445d24bf441e2a5c250a4207348008\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278486499\n\ndf366ed5ee26ebb73511127b4c329a98ecdd1f7b\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278469200\n\n58bc0f51b1270975b532f5847d9e9e0ff5cdc592\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278368388\n\ne0935db8bfe6fd901ee5d2104b0e1865682899f7\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278368327\n\naf4a739e9d810eb033903f1aa44c615ab729760d\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278132545\n\naac770126e2def40dcc387f50e8007b21c869e58\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 278016738\n\n271fed175d16501fb988e02b891166e9718ff141\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277992079\n\n597951d86beb120bc18428f70ffe0d5b97c70620\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277991975\n\nbba93d7148ff203d400a4929cd0fbc7dafd8dae2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277920288\n\n5b86376273637f5ce3844f29bf8cb1c4aceaea2d\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277850256\n\n8bc65fb6973a281e8fb9e5c12080644a550322c9\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277813826\n\n30a6ca0f1a98f1777c94fc22094c892c2a43e0ef\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277811318\n\n6bef7bd6184390a4e7aa8f09382d7d97afeccfc4\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277789040\n\naa33c92d79760f2a03ba9b42f855f7a821ed9147\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277759754\n\na4933867265e2b1cbc70f876a4312a92116c36ad\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277759298\n\nb21f96290006525e039b9bd1acddeeae407ae1ff\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277750396\n\n93661a24048eb64755fbbeedd7f6a207d1b4d8dc\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277748718\n\nc0e494ca955a4fdd9ad460a5890a354ec3a3a0ff\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277673798\n\n4e952e7e2bb0dd2ef389d552d48f44c8dc4b5f8f\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277595731\n\n78883c8de959f7a9870c332ab0e3d788b13dd763\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277528057\n\n7c4cf35d5fe3b8ad664bd219edd6d9f28a788b64\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277334937\n\nf28342c58c1df57c92e967961e1eaa641d447dde\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 277311984\n\n" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "bigquery", - "apiVersion": "v2", - "language": "python", - "generator": "gapic", - "config": "google/cloud/bigquery/artman_bigquery_v2.yaml" - } - } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".gitignore" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "benchmark/README.md" - }, - { - "path": "benchmark/benchmark.py" - }, - { - "path": "benchmark/queries.json" - }, - { - "path": "docs/.gitignore" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/dbapi.rst" - }, - { - "path": "docs/gapic/v2/enums.rst" - }, - { - "path": "docs/gapic/v2/types.rst" - }, - { - "path": 
"docs/generated/google.cloud.bigquery.magics.html" - }, - { - "path": "docs/index.rst" - }, - { - "path": "docs/magics.rst" - }, - { - "path": "docs/reference.rst" - }, - { - "path": "docs/snippets.py" - }, - { - "path": "docs/usage.html" - }, - { - "path": "docs/usage/client.rst" - }, - { - "path": "docs/usage/datasets.rst" - }, - { - "path": "docs/usage/encryption.rst" - }, - { - "path": "docs/usage/index.rst" - }, - { - "path": "docs/usage/jobs.rst" - }, - { - "path": "docs/usage/pandas.rst" - }, - { - "path": "docs/usage/queries.rst" - }, - { - "path": "docs/usage/tables.rst" - }, - { - "path": "google/__init__.py" - }, - { - "path": "google/cloud/__init__.py" - }, - { - "path": "google/cloud/bigquery/__init__.py" - }, - { - "path": "google/cloud/bigquery/_helpers.py" - }, - { - "path": "google/cloud/bigquery/_http.py" - }, - { - "path": "google/cloud/bigquery/_pandas_helpers.py" - }, - { - "path": "google/cloud/bigquery/client.py" - }, - { - "path": "google/cloud/bigquery/dataset.py" - }, - { - "path": "google/cloud/bigquery/dbapi/__init__.py" - }, - { - "path": "google/cloud/bigquery/dbapi/_helpers.py" - }, - { - "path": "google/cloud/bigquery/dbapi/connection.py" - }, - { - "path": "google/cloud/bigquery/dbapi/cursor.py" - }, - { - "path": "google/cloud/bigquery/dbapi/exceptions.py" - }, - { - "path": "google/cloud/bigquery/dbapi/types.py" - }, - { - "path": "google/cloud/bigquery/encryption_configuration.py" - }, - { - "path": "google/cloud/bigquery/enums.py" - }, - { - "path": "google/cloud/bigquery/external_config.py" - }, - { - "path": "google/cloud/bigquery/job.py" - }, - { - "path": "google/cloud/bigquery/magics.py" - }, - { - "path": "google/cloud/bigquery/model.py" - }, - { - "path": "google/cloud/bigquery/query.py" - }, - { - "path": "google/cloud/bigquery/retry.py" - }, - { - "path": "google/cloud/bigquery/routine.py" - }, - { - "path": "google/cloud/bigquery/schema.py" - }, - { - "path": "google/cloud/bigquery/table.py" - }, - { - "path": "google/cloud/bigquery_v2/__init__.py" - }, - { - "path": "google/cloud/bigquery_v2/gapic/__init__.py" - }, - { - "path": "google/cloud/bigquery_v2/gapic/enums.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/__init__.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/encryption_config.proto" - }, - { - "path": "google/cloud/bigquery_v2/proto/encryption_config_pb2.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/location_metadata.proto" - }, - { - "path": "google/cloud/bigquery_v2/proto/location_metadata_pb2.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/location_metadata_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/model.proto" - }, - { - "path": "google/cloud/bigquery_v2/proto/model_pb2.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/model_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/model_reference.proto" - }, - { - "path": "google/cloud/bigquery_v2/proto/model_reference_pb2.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/standard_sql.proto" - }, - { - "path": "google/cloud/bigquery_v2/proto/standard_sql_pb2.py" - }, - { - "path": "google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py" - }, - { - "path": "google/cloud/bigquery_v2/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "pylint.config.py" - }, - { - "path": "samples/__init__.py" - }, - { - "path": "samples/add_empty_column.py" - 
}, - { - "path": "samples/browse_table_data.py" - }, - { - "path": "samples/client_list_jobs.py" - }, - { - "path": "samples/client_load_partitioned_table.py" - }, - { - "path": "samples/client_query.py" - }, - { - "path": "samples/client_query_add_column.py" - }, - { - "path": "samples/client_query_batch.py" - }, - { - "path": "samples/client_query_destination_table.py" - }, - { - "path": "samples/client_query_destination_table_cmek.py" - }, - { - "path": "samples/client_query_destination_table_legacy.py" - }, - { - "path": "samples/client_query_dry_run.py" - }, - { - "path": "samples/client_query_legacy_sql.py" - }, - { - "path": "samples/client_query_relax_column.py" - }, - { - "path": "samples/client_query_w_array_params.py" - }, - { - "path": "samples/client_query_w_named_params.py" - }, - { - "path": "samples/client_query_w_positional_params.py" - }, - { - "path": "samples/client_query_w_struct_params.py" - }, - { - "path": "samples/client_query_w_timestamp_params.py" - }, - { - "path": "samples/copy_table.py" - }, - { - "path": "samples/copy_table_cmek.py" - }, - { - "path": "samples/copy_table_multiple_source.py" - }, - { - "path": "samples/create_dataset.py" - }, - { - "path": "samples/create_job.py" - }, - { - "path": "samples/create_routine.py" - }, - { - "path": "samples/create_routine_ddl.py" - }, - { - "path": "samples/create_table.py" - }, - { - "path": "samples/create_table_range_partitioned.py" - }, - { - "path": "samples/dataset_exists.py" - }, - { - "path": "samples/delete_dataset.py" - }, - { - "path": "samples/delete_dataset_labels.py" - }, - { - "path": "samples/delete_model.py" - }, - { - "path": "samples/delete_routine.py" - }, - { - "path": "samples/delete_table.py" - }, - { - "path": "samples/download_public_data.py" - }, - { - "path": "samples/download_public_data_sandbox.py" - }, - { - "path": "samples/get_dataset.py" - }, - { - "path": "samples/get_dataset_labels.py" - }, - { - "path": "samples/get_model.py" - }, - { - "path": "samples/get_routine.py" - }, - { - "path": "samples/get_table.py" - }, - { - "path": "samples/label_dataset.py" - }, - { - "path": "samples/list_datasets.py" - }, - { - "path": "samples/list_datasets_by_label.py" - }, - { - "path": "samples/list_models.py" - }, - { - "path": "samples/list_routines.py" - }, - { - "path": "samples/list_tables.py" - }, - { - "path": "samples/load_table_dataframe.py" - }, - { - "path": "samples/query_external_gcs_temporary_table.py" - }, - { - "path": "samples/query_external_sheets_permanent_table.py" - }, - { - "path": "samples/query_external_sheets_temporary_table.py" - }, - { - "path": "samples/query_no_cache.py" - }, - { - "path": "samples/query_pagination.py" - }, - { - "path": "samples/query_script.py" - }, - { - "path": "samples/query_to_arrow.py" - }, - { - "path": "samples/table_exists.py" - }, - { - "path": "samples/table_insert_rows.py" - }, - { - "path": "samples/table_insert_rows_explicit_none_insert_ids.py" - }, - { - "path": "samples/tests/__init__.py" - }, - { - "path": "samples/tests/conftest.py" - }, - { - "path": "samples/tests/test_add_empty_column.py" - }, - { - "path": "samples/tests/test_browse_table_data.py" - }, - { - "path": "samples/tests/test_client_list_jobs.py" - }, - { - "path": "samples/tests/test_client_load_partitioned_table.py" - }, - { - "path": "samples/tests/test_client_query.py" - }, - { - "path": "samples/tests/test_client_query_add_column.py" - }, - { - "path": "samples/tests/test_client_query_batch.py" - }, - { - "path": 
"samples/tests/test_client_query_destination_table.py" - }, - { - "path": "samples/tests/test_client_query_destination_table_cmek.py" - }, - { - "path": "samples/tests/test_client_query_destination_table_legacy.py" - }, - { - "path": "samples/tests/test_client_query_dry_run.py" - }, - { - "path": "samples/tests/test_client_query_legacy_sql.py" - }, - { - "path": "samples/tests/test_client_query_relax_column.py" - }, - { - "path": "samples/tests/test_client_query_w_array_params.py" - }, - { - "path": "samples/tests/test_client_query_w_named_params.py" - }, - { - "path": "samples/tests/test_client_query_w_positional_params.py" - }, - { - "path": "samples/tests/test_client_query_w_struct_params.py" - }, - { - "path": "samples/tests/test_client_query_w_timestamp_params.py" - }, - { - "path": "samples/tests/test_copy_table.py" - }, - { - "path": "samples/tests/test_copy_table_cmek.py" - }, - { - "path": "samples/tests/test_copy_table_multiple_source.py" - }, - { - "path": "samples/tests/test_create_dataset.py" - }, - { - "path": "samples/tests/test_create_job.py" - }, - { - "path": "samples/tests/test_create_table.py" - }, - { - "path": "samples/tests/test_create_table_range_partitioned.py" - }, - { - "path": "samples/tests/test_dataset_exists.py" - }, - { - "path": "samples/tests/test_dataset_label_samples.py" - }, - { - "path": "samples/tests/test_delete_dataset.py" - }, - { - "path": "samples/tests/test_delete_table.py" - }, - { - "path": "samples/tests/test_download_public_data.py" - }, - { - "path": "samples/tests/test_download_public_data_sandbox.py" - }, - { - "path": "samples/tests/test_get_dataset.py" - }, - { - "path": "samples/tests/test_get_table.py" - }, - { - "path": "samples/tests/test_list_datasets.py" - }, - { - "path": "samples/tests/test_list_datasets_by_label.py" - }, - { - "path": "samples/tests/test_list_tables.py" - }, - { - "path": "samples/tests/test_load_table_dataframe.py" - }, - { - "path": "samples/tests/test_model_samples.py" - }, - { - "path": "samples/tests/test_query_external_gcs_temporary_table.py" - }, - { - "path": "samples/tests/test_query_external_sheets_permanent_table.py" - }, - { - "path": "samples/tests/test_query_external_sheets_temporary_table.py" - }, - { - "path": "samples/tests/test_query_no_cache.py" - }, - { - "path": "samples/tests/test_query_pagination.py" - }, - { - "path": "samples/tests/test_query_script.py" - }, - { - "path": "samples/tests/test_query_to_arrow.py" - }, - { - "path": "samples/tests/test_routine_samples.py" - }, - { - "path": "samples/tests/test_table_exists.py" - }, - { - "path": "samples/tests/test_table_insert_rows.py" - }, - { - "path": "samples/tests/test_table_insert_rows_explicit_none_insert_ids.py" - }, - { - "path": "samples/tests/test_undelete_table.py" - }, - { - "path": "samples/tests/test_update_dataset_access.py" - }, - { - "path": "samples/tests/test_update_dataset_default_partition_expiration.py" - }, - { - "path": "samples/tests/test_update_dataset_default_table_expiration.py" - }, - { - "path": "samples/tests/test_update_dataset_description.py" - }, - { - "path": "samples/tests/test_update_table_require_partition_filter.py" - }, - { - "path": "samples/undelete_table.py" - }, - { - "path": "samples/update_dataset_access.py" - }, - { - "path": "samples/update_dataset_default_partition_expiration.py" - }, - { - "path": "samples/update_dataset_default_table_expiration.py" - }, - { - "path": "samples/update_dataset_description.py" - }, - { - "path": "samples/update_model.py" - }, - { - "path": 
"samples/update_routine.py" - }, - { - "path": "samples/update_table_require_partition_filter.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/__init__.py" - }, - { - "path": "tests/data/characters.json" - }, - { - "path": "tests/data/characters.jsonl" - }, - { - "path": "tests/data/colors.avro" - }, - { - "path": "tests/data/people.csv" - }, - { - "path": "tests/data/schema.json" - }, - { - "path": "tests/scrub_datasets.py" - }, - { - "path": "tests/system.py" - }, - { - "path": "tests/unit/__init__.py" - }, - { - "path": "tests/unit/enums/__init__.py" - }, - { - "path": "tests/unit/enums/test_standard_sql_data_types.py" - }, - { - "path": "tests/unit/helpers.py" - }, - { - "path": "tests/unit/model/__init__.py" - }, - { - "path": "tests/unit/model/test_model.py" - }, - { - "path": "tests/unit/model/test_model_reference.py" - }, - { - "path": "tests/unit/routine/__init__.py" - }, - { - "path": "tests/unit/routine/test_routine.py" - }, - { - "path": "tests/unit/routine/test_routine_argument.py" - }, - { - "path": "tests/unit/routine/test_routine_reference.py" - }, - { - "path": "tests/unit/test__helpers.py" - }, - { - "path": "tests/unit/test__http.py" - }, - { - "path": "tests/unit/test__pandas_helpers.py" - }, - { - "path": "tests/unit/test_client.py" - }, - { - "path": "tests/unit/test_dataset.py" - }, - { - "path": "tests/unit/test_dbapi__helpers.py" - }, - { - "path": "tests/unit/test_dbapi_connection.py" - }, - { - "path": "tests/unit/test_dbapi_cursor.py" - }, - { - "path": "tests/unit/test_dbapi_types.py" - }, - { - "path": "tests/unit/test_encryption_configuration.py" - }, - { - "path": "tests/unit/test_external_config.py" - }, - { - "path": "tests/unit/test_job.py" - }, - { - "path": "tests/unit/test_magics.py" - }, - { - "path": "tests/unit/test_query.py" - }, - { - "path": "tests/unit/test_retry.py" - }, - { - "path": "tests/unit/test_schema.py" - }, - { - "path": "tests/unit/test_signature_compatibility.py" - }, - { - "path": "tests/unit/test_table.py" - } - ] -} \ No newline at end of file diff --git a/bigquery/synth.py b/bigquery/synth.py deleted file mode 100644 index a20426d3910f..000000000000 --- a/bigquery/synth.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() - -version = 'v2' - -library = gapic.py_library( - 'bigquery', - version, - config_path='/google/cloud/bigquery/' - 'artman_bigquery_v2.yaml', - artman_output_name='bigquery-v2', - include_protos=True, -) - -s.move( - [ - library / "google/cloud/bigquery_v2/gapic/enums.py", - library / "google/cloud/bigquery_v2/types.py", - library / "google/cloud/bigquery_v2/proto/location*", - library / "google/cloud/bigquery_v2/proto/encryption_config*", - library / "google/cloud/bigquery_v2/proto/model*", - library / "google/cloud/bigquery_v2/proto/standard_sql*", - ], -) - -# Fix up proto docs that are missing summary line. -s.replace( - "google/cloud/bigquery_v2/proto/model_pb2.py", - '"""Attributes:', - '"""Protocol buffer.\n\n Attributes:', -) -s.replace( - "google/cloud/bigquery_v2/proto/encryption_config_pb2.py", - '"""Attributes:', - '"""Encryption configuration.\n\n Attributes:', -) - -# Remove non-ascii characters from docstrings for Python 2.7. -# Format quoted strings as plain text. -s.replace("google/cloud/bigquery_v2/proto/*.py", "[“”]", '``') - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/bigquery/tests/__init__.py b/bigquery/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bigquery/tests/data/characters.json b/bigquery/tests/data/characters.json deleted file mode 100644 index d38636810196..000000000000 --- a/bigquery/tests/data/characters.json +++ /dev/null @@ -1,68 +0,0 @@ -[ - { - "Age" : "111", - "Spells" : [], - "Name" : "Bilbo", - "Weight" : 67.2, - "TeaTime" : "10:00:00", - "NextVacation" : "2017-09-22", - "FavoriteTime" : "2031-04-01T05:09:27", - "IsMagic" : false - }, - { - "Age" : "1000", - "Name" : "Gandalf", - "Spells" : [ - { - "Name" : "Skydragon", - "Properties" : [ - { - "Power" : 1, - "Name" : "Flying" - }, - { - "Name" : "Creature", - "Power" : 1 - }, - { - "Power" : 11, - "Name" : "Explodey" - } - ], - "LastUsed" : "2015-10-31 23:59:56 UTC", - "Icon" : "iVBORw0KGgoAAAANSUhEUgAAAB4AAAAgCAYAAAAFQMh/AAAAAXNSR0IArs4c6QAAA9lJREFUSA21lk9OVEEQxvsRDImoiMG9mLjjCG5mEg7gEfQGsIcF7p0EDsBBSJiNO7ZsFRZqosb/QkSj7fer7ur33sw8GDFUUq+7q6vqq6qu7pkQzqG4EeI521e7FePVgM9cGPYwhCi6UO8qFOK+YY+Br66ujsmmxb84Yzwp6zCsxjJfWVkxnMsEMGuWHZ9Wcz11cM48hkq0vLwc1tbW4mAwqDpcdIqnMmgF0JMv2CiGnZ2dcHR0FA4PD8Pe3t5U/tx6bCSlb+JT8XfxT3HsUek0Li0tRdjWl+z6iRF+FNA1hXPDQ/IMNyRg3s8bD/OaZS+VP+9cOLSa64cA34oXZWagDkRzAaJxXaE+ufc4rCN7LrazZ2+8+STtpAL8WYDvpTaHKlkB2iQARMvb2+H27m4YaL7zaDtUw1BZAASi6T8T2UZnPZV2pvnJfCH5p8bewcGB6TrIfz8wBZgHQ83kjpuj6RBYQpuo09Tvmpd7TPe+ktZN8cKwS92KWXGuaqWowlYEwthtMcWOZUNJc8at+zuF/Xkqo69baS7P+AvWjYwJ4jyHXXsEnd74ZO/Pq+uXUuv6WNlso6cvnDsZB1V/unJab3D1/KrJDw9NCM9wHf2FK2ejTKMejnBHfGtfH7LGGCdQDqaqJgfgzWjXK1nYV4jRbPGnxUT7cqUaZfJrVZeOm9QmB21L6xXgbu/ScsYusJFMoU0x2fsamRJOd6kOYDRLUxv94ENZe8+0gM+0dyz+KgU7X8rLHHCIOZyrna4y6ykIu0YCs02TBXmk3PZssmEgaTxTo83xjCIjoE21h0Yah3MrV4+9kR8MaabGze+9NEILGAFE5nMOiiA32KnAr/sb7tED3nzlzC4dB38WMC+EjaqHfqvUKHi2gJPdWQ6AbH8hgyQ7QY6jvjj3QZWvX6pUAtduTX5Dss96Q7NI9RQRJeeKvRFbt0v2gb1Gx/PooJsztn1c1DqpAU3Hde2dB2aEHBhjgOFjMeDvxLafjQ3YZQSgOcHJZX611H45sGLHWvYTz9hiURlpNoBZvxb/Ft9lAQ1DmBfUiR+j1hAPkMBTE9L9+zLva1QvGFHurRBaZ5xLVitoBviiRkD/sIMDztKA5FA0b9/0OclzO2/XAQymJ0TcghZwEo9/AX8gMeAJMOvIsWWt5bwCoiFhVSllrdH0t5Q1JHAFlKJNkvTVdn2GHb9KdmacMT+d/Os05imJUccRX2YuZ93Sxf0Ilc4DPDeAq5SAvFEAY94cQc6BA26dzb4HWAJI4DPmQE5KCVUyvb2FcDZem7JdT2ggKUP3xX6n9XNq1DpzSf4Cy4ZqSlmM8d8AAAAASUVORK5CYII=", - 
"DiscoveredBy" : "Firebreather" - } - ], - "NextVacation" : "2666-06-06", - "TeaTime" : "15:00:00", - "Weight" : 198.6, - "FavoriteTime" : "2001-12-19T23:59:59", - "FavoriteNumber" : "3.141592654", - "IsMagic" : true - }, - { - "Weight" : 128.3, - "TeaTime" : "12:00:00", - "NextVacation" : "2017-03-14", - "IsMagic" : true, - "FavoriteTime" : "2000-10-31T23:27:46", - "Age" : "17", - "FavoriteNumber" : "13", - "Spells" : [ - { - "LastUsed" : "2017-02-14 12:07:23 UTC", - "Properties" : [ - { - "Name" : "Makes you look crazy", - "Power" : 1 - } - ], - "Icon" : "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC", - "Name" : "Talking cats", - "DiscoveredBy" : "Salem" - } - ], - "Name" : "Sabrina" - } -] diff --git a/bigquery/tests/data/characters.jsonl b/bigquery/tests/data/characters.jsonl deleted file mode 100644 index 42b5bdc6a152..000000000000 --- a/bigquery/tests/data/characters.jsonl +++ /dev/null @@ -1,3 +0,0 @@ -{"Name":"Bilbo","Age":"111","Weight":67.2,"IsMagic":false,"Spells":[],"TeaTime":"10:00:00","NextVacation":"2017-09-22","FavoriteTime":"2031-04-01T05:09:27","FavoriteNumber":"111"} -{"Name":"Gandalf","Age":"1000","Weight":198.6,"IsMagic":true,"Spells":[{"Name": "Skydragon", 
"Icon":"iVBORw0KGgoAAAANSUhEUgAAAB4AAAAgCAYAAAAFQMh/AAAAAXNSR0IArs4c6QAAA9lJREFUSA21lk9OVEEQxvsRDImoiMG9mLjjCG5mEg7gEfQGsIcF7p0EDsBBSJiNO7ZsFRZqosb/QkSj7fer7ur33sw8GDFUUq+7q6vqq6qu7pkQzqG4EeI521e7FePVgM9cGPYwhCi6UO8qFOK+YY+Br66ujsmmxb84Yzwp6zCsxjJfWVkxnMsEMGuWHZ9Wcz11cM48hkq0vLwc1tbW4mAwqDpcdIqnMmgF0JMv2CiGnZ2dcHR0FA4PD8Pe3t5U/tx6bCSlb+JT8XfxT3HsUek0Li0tRdjWl+z6iRF+FNA1hXPDQ/IMNyRg3s8bD/OaZS+VP+9cOLSa64cA34oXZWagDkRzAaJxXaE+ufc4rCN7LrazZ2+8+STtpAL8WYDvpTaHKlkB2iQARMvb2+H27m4YaL7zaDtUw1BZAASi6T8T2UZnPZV2pvnJfCH5p8bewcGB6TrIfz8wBZgHQ83kjpuj6RBYQpuo09Tvmpd7TPe+ktZN8cKwS92KWXGuaqWowlYEwthtMcWOZUNJc8at+zuF/Xkqo69baS7P+AvWjYwJ4jyHXXsEnd74ZO/Pq+uXUuv6WNlso6cvnDsZB1V/unJab3D1/KrJDw9NCM9wHf2FK2ejTKMejnBHfGtfH7LGGCdQDqaqJgfgzWjXK1nYV4jRbPGnxUT7cqUaZfJrVZeOm9QmB21L6xXgbu/ScsYusJFMoU0x2fsamRJOd6kOYDRLUxv94ENZe8+0gM+0dyz+KgU7X8rLHHCIOZyrna4y6ykIu0YCs02TBXmk3PZssmEgaTxTo83xjCIjoE21h0Yah3MrV4+9kR8MaabGze+9NEILGAFE5nMOiiA32KnAr/sb7tED3nzlzC4dB38WMC+EjaqHfqvUKHi2gJPdWQ6AbH8hgyQ7QY6jvjj3QZWvX6pUAtduTX5Dss96Q7NI9RQRJeeKvRFbt0v2gb1Gx/PooJsztn1c1DqpAU3Hde2dB2aEHBhjgOFjMeDvxLafjQ3YZQSgOcHJZX611H45sGLHWvYTz9hiURlpNoBZvxb/Ft9lAQ1DmBfUiR+j1hAPkMBTE9L9+zLva1QvGFHurRBaZ5xLVitoBviiRkD/sIMDztKA5FA0b9/0OclzO2/XAQymJ0TcghZwEo9/AX8gMeAJMOvIsWWt5bwCoiFhVSllrdH0t5Q1JHAFlKJNkvTVdn2GHb9KdmacMT+d/Os05imJUccRX2YuZ93Sxf0Ilc4DPDeAq5SAvFEAY94cQc6BA26dzb4HWAJI4DPmQE5KCVUyvb2FcDZem7JdT2ggKUP3xX6n9XNq1DpzSf4Cy4ZqSlmM8d8AAAAASUVORK5CYII=","DiscoveredBy":"Firebreather","Properties":[{"Name":"Flying","Power":1},{"Name":"Creature","Power":1},{"Name":"Explodey","Power":11}],"LastUsed":"2015-10-31 23:59:56 UTC"}],"TeaTime":"15:00:00","NextVacation":"2666-06-06","FavoriteTime":"2001-12-19T23:59:59","FavoriteNumber":"1.618033989"} -{"Name":"Sabrina","Age":"17","Weight":128.3,"IsMagic":true,"Spells":[{"Name": "Talking cats", "Icon":"iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC","DiscoveredBy":"Salem","Properties":[{"Name":"Makes you look crazy","Power":1}],"LastUsed":"2017-02-14 12:07:23 UTC"}],"TeaTime":"12:00:00","NextVacation":"2017-03-14","FavoriteTime":"2000-10-31T23:27:46","FavoriteNumber":"13"} diff --git a/bigquery/tests/data/colors.avro b/bigquery/tests/data/colors.avro deleted file mode 100644 index e0133fd027f49093d638b1f2d82e70948306b2dd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 308 
zcmZ9Hu}T9$5Qa6#9`=kDIcBjhE-Qo%Q{6Rdmy z@gZz&M6ePI_d;S}`Op8)_s<+x7u!4gymeH1F$2Tj2MZRu8rXz65ac8)nB}I|hlJ$T z($`${GeM?=D{g9pveN3Z0oTQdBoWOM5rH{PbDoQS*XMy(2+^NvKK7iiaMJ() diff --git a/bigquery/tests/data/people.csv b/bigquery/tests/data/people.csv deleted file mode 100644 index d3c7d063892a..000000000000 --- a/bigquery/tests/data/people.csv +++ /dev/null @@ -1,3 +0,0 @@ -full_name,age -Phred Phlyntstone,32 -Wylma Phlyntstone,29 \ No newline at end of file diff --git a/bigquery/tests/data/schema.json b/bigquery/tests/data/schema.json deleted file mode 100644 index 6a36e55e579a..000000000000 --- a/bigquery/tests/data/schema.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "fields" : [ - { - "type" : "STRING", - "name" : "Name", - "mode" : "NULLABLE" - }, - { - "name" : "Age", - "mode" : "NULLABLE", - "type" : "INTEGER" - }, - { - "type" : "FLOAT", - "name" : "Weight", - "mode" : "NULLABLE" - }, - { - "mode" : "NULLABLE", - "name" : "IsMagic", - "type" : "BOOLEAN" - }, - { - "name" : "Spells", - "fields" : [ - { - "mode" : "NULLABLE", - "name" : "Name", - "type" : "STRING" - }, - { - "mode" : "NULLABLE", - "name" : "LastUsed", - "type" : "TIMESTAMP" - }, - { - "type" : "STRING", - "mode" : "NULLABLE", - "name" : "DiscoveredBy" - }, - { - "name" : "Properties", - "fields" : [ - { - "name" : "Name", - "mode" : "NULLABLE", - "type" : "STRING" - }, - { - "type" : "FLOAT", - "name" : "Power", - "mode" : "NULLABLE" - } - ], - "mode" : "REPEATED", - "type" : "RECORD" - }, - { - "mode" : "NULLABLE", - "name" : "Icon", - "type" : "BYTES" - } - ], - "mode" : "REPEATED", - "type" : "RECORD" - }, - { - "type" : "TIME", - "mode" : "NULLABLE", - "name" : "TeaTime" - }, - { - "type" : "DATE", - "name" : "NextVacation", - "mode" : "NULLABLE" - }, - { - "mode" : "NULLABLE", - "name" : "FavoriteTime", - "type" : "DATETIME" - }, - { - "mode" : "NULLABLE", - "name" : "FavoriteNumber", - "type" : "NUMERIC" - } - ] -} diff --git a/bigquery/tests/scrub_datasets.py b/bigquery/tests/scrub_datasets.py deleted file mode 100644 index 9a8ab3e7b7c5..000000000000 --- a/bigquery/tests/scrub_datasets.py +++ /dev/null @@ -1,25 +0,0 @@ -import re -import sys - -from google.api_core.exceptions import NotFound -from google.cloud.bigquery import Client - - -def main(prefixes): - client = Client() - - pattern = re.compile("|".join("^{}.*$".format(prefix) for prefix in prefixes)) - - ds_items = list(client.list_datasets()) - for dataset in ds_items: - ds_id = dataset.dataset_id - if pattern.match(ds_id): - print("Deleting dataset: {}".format(ds_id)) - try: - client.delete_dataset(dataset.reference, delete_contents=True) - except NotFound: - print(" NOT FOUND") - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py deleted file mode 100644 index 4a1c032717f5..000000000000 --- a/bigquery/tests/system.py +++ /dev/null @@ -1,2542 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import base64 -import collections -import concurrent.futures -import csv -import datetime -import decimal -import json -import operator -import os -import time -import unittest -import uuid -import re - -import requests -import six -import psutil -import pytest -import pytz - -try: - from google.cloud import bigquery_storage_v1beta1 -except ImportError: # pragma: NO COVER - bigquery_storage_v1beta1 = None -try: - import pandas -except ImportError: # pragma: NO COVER - pandas = None -try: - import pyarrow - import pyarrow.types -except ImportError: # pragma: NO COVER - pyarrow = None -try: - import IPython - from IPython.utils import io - from IPython.testing import tools - from IPython.terminal import interactiveshell -except ImportError: # pragma: NO COVER - IPython = None - -from google.api_core.exceptions import PreconditionFailed -from google.api_core.exceptions import BadRequest -from google.api_core.exceptions import Conflict -from google.api_core.exceptions import Forbidden -from google.api_core.exceptions import GoogleAPICallError -from google.api_core.exceptions import NotFound -from google.api_core.exceptions import InternalServerError -from google.api_core.exceptions import ServiceUnavailable -from google.api_core.exceptions import TooManyRequests -from google.cloud import bigquery -from google.cloud import bigquery_v2 -from google.cloud.bigquery.dataset import Dataset -from google.cloud.bigquery.dataset import DatasetReference -from google.cloud.bigquery.table import Table -from google.cloud._helpers import UTC -from google.cloud.bigquery import dbapi -from google.cloud import storage - -from test_utils.retry import RetryErrors -from test_utils.retry import RetryInstanceState -from test_utils.retry import RetryResult -from test_utils.system import unique_resource_id - - -JOB_TIMEOUT = 120 # 2 minutes -WHERE = os.path.abspath(os.path.dirname(__file__)) - -# Common table data used for many tests. -ROWS = [ - ("Phred Phlyntstone", 32), - ("Bharney Rhubble", 33), - ("Wylma Phlyntstone", 29), - ("Bhettye Rhubble", 27), -] -HEADER_ROW = ("Full Name", "Age") -SCHEMA = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), -] -TIME_PARTITIONING_CLUSTERING_FIELDS_SCHEMA = [ - bigquery.SchemaField("transaction_time", "TIMESTAMP", mode="REQUIRED"), - bigquery.SchemaField("transaction_id", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("user_email", "STRING", mode="REQUIRED"), - bigquery.SchemaField("store_code", "STRING", mode="REQUIRED"), - bigquery.SchemaField( - "items", - "RECORD", - mode="REPEATED", - fields=[ - bigquery.SchemaField("item_code", "STRING", mode="REQUIRED"), - bigquery.SchemaField("quantity", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("comments", "STRING", mode="NULLABLE"), - bigquery.SchemaField("expiration_date", "DATE", mode="REQUIRED"), - ], - ), -] - -# The VPC-SC team maintains a mirror of the GCS bucket used for code -# samples. The public bucket crosses the configured security boundary. 
-# See: https://github.com/googleapis/google-cloud-python/issues/8550 -SAMPLES_BUCKET = os.environ.get("GCLOUD_TEST_SAMPLES_BUCKET", "cloud-samples-data") - -retry_storage_errors = RetryErrors( - (TooManyRequests, InternalServerError, ServiceUnavailable) -) - - -def _has_rows(result): - return len(result) > 0 - - -def _make_dataset_id(prefix): - return "%s%s" % (prefix, unique_resource_id()) - - -def _load_json_schema(filename="data/schema.json"): - from google.cloud.bigquery.table import _parse_schema_resource - - json_filename = os.path.join(WHERE, filename) - - with open(json_filename, "r") as schema_file: - return _parse_schema_resource(json.load(schema_file)) - - -def _rate_limit_exceeded(forbidden): - """Predicate: pass only exceptions with 'rateLimitExceeded' as reason.""" - return any(error["reason"] == "rateLimitExceeded" for error in forbidden._errors) - - -# We need to wait to stay within the rate limits. -# The alternative outcome is a 403 Forbidden response from upstream, which -# they return instead of the more appropriate 429. -# See https://cloud.google.com/bigquery/quota-policy -retry_403 = RetryErrors(Forbidden, error_predicate=_rate_limit_exceeded) - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - - CLIENT = None - CURSOR = None - - -def setUpModule(): - Config.CLIENT = bigquery.Client() - Config.CURSOR = dbapi.connect(Config.CLIENT).cursor() - - -class TestBigQuery(unittest.TestCase): - def setUp(self): - self.to_delete = [] - - def tearDown(self): - def _still_in_use(bad_request): - return any( - error["reason"] == "resourceInUse" for error in bad_request._errors - ) - - retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use) - retry_storage_errors_conflict = RetryErrors( - (Conflict, TooManyRequests, InternalServerError, ServiceUnavailable) - ) - for doomed in self.to_delete: - if isinstance(doomed, storage.Bucket): - retry_storage_errors_conflict(doomed.delete)(force=True) - elif isinstance(doomed, (Dataset, bigquery.DatasetReference)): - retry_in_use(Config.CLIENT.delete_dataset)(doomed, delete_contents=True) - elif isinstance(doomed, (Table, bigquery.TableReference)): - retry_in_use(Config.CLIENT.delete_table)(doomed) - else: - doomed.delete() - - def test_get_service_account_email(self): - client = Config.CLIENT - - got = client.get_service_account_email() - - self.assertIsInstance(got, six.text_type) - self.assertIn("@", got) - - def _create_bucket(self, bucket_name, location=None): - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - retry_storage_errors(bucket.create)(location=location) - self.to_delete.append(bucket) - - return bucket - - def test_close_releases_open_sockets(self): - current_process = psutil.Process() - conn_count_start = len(current_process.connections()) - - client = Config.CLIENT - client.query( - """ - SELECT - source_year AS year, COUNT(is_male) AS birth_count - FROM `bigquery-public-data.samples.natality` - GROUP BY year - ORDER BY year DESC - LIMIT 15 - """ - ) - - client.close() - - conn_count_end = len(current_process.connections()) - self.assertEqual(conn_count_end, conn_count_start) - - def test_create_dataset(self): - DATASET_ID = _make_dataset_id("create_dataset") - dataset = self.temp_dataset(DATASET_ID) - - self.assertTrue(_dataset_exists(dataset)) - self.assertEqual(dataset.dataset_id, DATASET_ID) - self.assertEqual(dataset.project, Config.CLIENT.project) - - def 
test_get_dataset(self): - dataset_id = _make_dataset_id("get_dataset") - client = Config.CLIENT - dataset_arg = Dataset(client.dataset(dataset_id)) - dataset_arg.friendly_name = "Friendly" - dataset_arg.description = "Description" - dataset = retry_403(client.create_dataset)(dataset_arg) - self.to_delete.append(dataset) - dataset_ref = client.dataset(dataset_id) - - # Get with a reference. - got = client.get_dataset(dataset_ref) - self.assertEqual(got.friendly_name, "Friendly") - self.assertEqual(got.description, "Description") - - # Get with a string. - got = client.get_dataset(dataset_id) - self.assertEqual(got.friendly_name, "Friendly") - self.assertEqual(got.description, "Description") - - # Get with a fully-qualified string. - got = client.get_dataset("{}.{}".format(client.project, dataset_id)) - self.assertEqual(got.friendly_name, "Friendly") - self.assertEqual(got.description, "Description") - - def test_update_dataset(self): - dataset = self.temp_dataset(_make_dataset_id("update_dataset")) - self.assertTrue(_dataset_exists(dataset)) - self.assertIsNone(dataset.friendly_name) - self.assertIsNone(dataset.description) - self.assertEqual(dataset.labels, {}) - - dataset.friendly_name = "Friendly" - dataset.description = "Description" - dataset.labels = {"priority": "high", "color": "blue"} - ds2 = Config.CLIENT.update_dataset( - dataset, ("friendly_name", "description", "labels") - ) - self.assertEqual(ds2.friendly_name, "Friendly") - self.assertEqual(ds2.description, "Description") - self.assertEqual(ds2.labels, {"priority": "high", "color": "blue"}) - - ds2.labels = { - "color": "green", # change - "shape": "circle", # add - "priority": None, # delete - } - ds3 = Config.CLIENT.update_dataset(ds2, ["labels"]) - self.assertEqual(ds3.labels, {"color": "green", "shape": "circle"}) - - # If we try to update using d2 again, it will fail because the - # previous update changed the ETag. - ds2.description = "no good" - with self.assertRaises(PreconditionFailed): - Config.CLIENT.update_dataset(ds2, ["description"]) - - def test_list_datasets(self): - datasets_to_create = [ - "new" + unique_resource_id(), - "newer" + unique_resource_id(), - "newest" + unique_resource_id(), - ] - for dataset_id in datasets_to_create: - self.temp_dataset(dataset_id) - - # Retrieve the datasets. - iterator = Config.CLIENT.list_datasets() - all_datasets = list(iterator) - self.assertIsNone(iterator.next_page_token) - created = [ - dataset - for dataset in all_datasets - if dataset.dataset_id in datasets_to_create - and dataset.project == Config.CLIENT.project - ] - self.assertEqual(len(created), len(datasets_to_create)) - - def test_list_datasets_w_project(self): - # Retrieve datasets from a different project. 
- iterator = Config.CLIENT.list_datasets(project="bigquery-public-data") - all_datasets = frozenset([dataset.dataset_id for dataset in iterator]) - self.assertIn("usa_names", all_datasets) - - def test_create_table(self): - dataset = self.temp_dataset(_make_dataset_id("create_table")) - table_id = "test_table" - table_arg = Table(dataset.table(table_id), schema=SCHEMA) - self.assertFalse(_table_exists(table_arg)) - - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - self.assertTrue(_table_exists(table)) - self.assertEqual(table.table_id, table_id) - - def test_create_table_w_time_partitioning_w_clustering_fields(self): - from google.cloud.bigquery.table import TimePartitioning - from google.cloud.bigquery.table import TimePartitioningType - - dataset = self.temp_dataset(_make_dataset_id("create_table_tp_cf")) - table_id = "test_table" - table_arg = Table( - dataset.table(table_id), schema=TIME_PARTITIONING_CLUSTERING_FIELDS_SCHEMA - ) - self.assertFalse(_table_exists(table_arg)) - - table_arg.time_partitioning = TimePartitioning(field="transaction_time") - - table_arg.clustering_fields = ["user_email", "store_code"] - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - self.assertTrue(_table_exists(table)) - self.assertEqual(table.table_id, table_id) - time_partitioning = table.time_partitioning - self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) - self.assertEqual(time_partitioning.field, "transaction_time") - self.assertEqual(table.clustering_fields, ["user_email", "store_code"]) - - def test_delete_dataset_with_string(self): - dataset_id = _make_dataset_id("delete_table_true") - dataset_ref = Config.CLIENT.dataset(dataset_id) - retry_403(Config.CLIENT.create_dataset)(Dataset(dataset_ref)) - self.assertTrue(_dataset_exists(dataset_ref)) - Config.CLIENT.delete_dataset(dataset_id) - self.assertFalse(_dataset_exists(dataset_ref)) - - def test_delete_dataset_delete_contents_true(self): - dataset_id = _make_dataset_id("delete_table_true") - dataset = retry_403(Config.CLIENT.create_dataset)( - Dataset(Config.CLIENT.dataset(dataset_id)) - ) - - table_id = "test_table" - table_arg = Table(dataset.table(table_id), schema=SCHEMA) - table = retry_403(Config.CLIENT.create_table)(table_arg) - Config.CLIENT.delete_dataset(dataset, delete_contents=True) - - self.assertFalse(_table_exists(table)) - - def test_delete_dataset_delete_contents_false(self): - from google.api_core import exceptions - - dataset = self.temp_dataset(_make_dataset_id("delete_table_false")) - table_id = "test_table" - table_arg = Table(dataset.table(table_id), schema=SCHEMA) - - retry_403(Config.CLIENT.create_table)(table_arg) - with self.assertRaises(exceptions.BadRequest): - Config.CLIENT.delete_dataset(dataset) - - def test_get_table_w_public_dataset(self): - public = "bigquery-public-data" - dataset_id = "samples" - table_id = "shakespeare" - table_ref = DatasetReference(public, dataset_id).table(table_id) - - # Get table with reference. - table = Config.CLIENT.get_table(table_ref) - self.assertEqual(table.table_id, table_id) - self.assertEqual(table.dataset_id, dataset_id) - self.assertEqual(table.project, public) - schema_names = [field.name for field in table.schema] - self.assertEqual(schema_names, ["word", "word_count", "corpus", "corpus_date"]) - - # Get table with string. 
- table = Config.CLIENT.get_table("{}.{}.{}".format(public, dataset_id, table_id)) - self.assertEqual(table.table_id, table_id) - self.assertEqual(table.dataset_id, dataset_id) - self.assertEqual(table.project, public) - - def test_list_partitions(self): - table_ref = DatasetReference( - "bigquery-public-data", "ethereum_blockchain" - ).table("blocks") - all_rows = Config.CLIENT.list_partitions(table_ref) - self.assertIn("20180801", all_rows) - self.assertGreater(len(all_rows), 1000) - - def test_list_tables(self): - dataset_id = _make_dataset_id("list_tables") - dataset = self.temp_dataset(dataset_id) - # Retrieve tables before any are created for the dataset. - iterator = Config.CLIENT.list_tables(dataset) - all_tables = list(iterator) - self.assertEqual(all_tables, []) - self.assertIsNone(iterator.next_page_token) - - # Insert some tables to be listed. - tables_to_create = [ - "new" + unique_resource_id(), - "newer" + unique_resource_id(), - "newest" + unique_resource_id(), - ] - for table_name in tables_to_create: - table = Table(dataset.table(table_name), schema=SCHEMA) - created_table = retry_403(Config.CLIENT.create_table)(table) - self.to_delete.insert(0, created_table) - - # Retrieve the tables. - iterator = Config.CLIENT.list_tables(dataset) - all_tables = list(iterator) - self.assertIsNone(iterator.next_page_token) - created = [ - table - for table in all_tables - if (table.table_id in tables_to_create and table.dataset_id == dataset_id) - ] - self.assertEqual(len(created), len(tables_to_create)) - - # List tables with a string ID. - iterator = Config.CLIENT.list_tables(dataset_id) - self.assertGreater(len(list(iterator)), 0) - - # List tables with a fully-qualified string ID. - iterator = Config.CLIENT.list_tables( - "{}.{}".format(Config.CLIENT.project, dataset_id) - ) - self.assertGreater(len(list(iterator)), 0) - - def test_update_table(self): - dataset = self.temp_dataset(_make_dataset_id("update_table")) - - TABLE_NAME = "test_table" - table_arg = Table(dataset.table(TABLE_NAME), schema=SCHEMA) - self.assertFalse(_table_exists(table_arg)) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - self.assertTrue(_table_exists(table)) - self.assertIsNone(table.friendly_name) - self.assertIsNone(table.description) - self.assertEqual(table.labels, {}) - table.friendly_name = "Friendly" - table.description = "Description" - table.labels = {"priority": "high", "color": "blue"} - - table2 = Config.CLIENT.update_table( - table, ["friendly_name", "description", "labels"] - ) - - self.assertEqual(table2.friendly_name, "Friendly") - self.assertEqual(table2.description, "Description") - self.assertEqual(table2.labels, {"priority": "high", "color": "blue"}) - - table2.description = None - table2.labels = { - "color": "green", # change - "shape": "circle", # add - "priority": None, # delete - } - table3 = Config.CLIENT.update_table(table2, ["description", "labels"]) - self.assertIsNone(table3.description) - self.assertEqual(table3.labels, {"color": "green", "shape": "circle"}) - - # If we try to update using table2 again, it will fail because the - # previous update changed the ETag. 
- table2.description = "no good" - with self.assertRaises(PreconditionFailed): - Config.CLIENT.update_table(table2, ["description"]) - - def test_update_table_schema(self): - dataset = self.temp_dataset(_make_dataset_id("update_table")) - - TABLE_NAME = "test_table" - table_arg = Table(dataset.table(TABLE_NAME), schema=SCHEMA) - self.assertFalse(_table_exists(table_arg)) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - self.assertTrue(_table_exists(table)) - voter = bigquery.SchemaField("voter", "BOOLEAN", mode="NULLABLE") - schema = table.schema - schema.append(voter) - table.schema = schema - - updated_table = Config.CLIENT.update_table(table, ["schema"]) - - self.assertEqual(len(updated_table.schema), len(schema)) - for found, expected in zip(updated_table.schema, schema): - self.assertEqual(found.name, expected.name) - self.assertEqual(found.field_type, expected.field_type) - self.assertEqual(found.mode, expected.mode) - - @staticmethod - def _fetch_single_page(table, selected_fields=None): - iterator = Config.CLIENT.list_rows(table, selected_fields=selected_fields) - page = six.next(iterator.pages) - return list(page) - - def _create_table_many_columns(self, rowcount): - # Generate a table of maximum width via CREATE TABLE AS SELECT. - # first column is named 'rowval', and has a value from 1..rowcount - # Subsequent column is named col_ and contains the value N*rowval, - # where N is between 1 and 9999 inclusive. - dsname = _make_dataset_id("wide_schema") - dataset = self.temp_dataset(dsname) - table_id = "many_columns" - table_ref = dataset.table(table_id) - self.to_delete.insert(0, table_ref) - colprojections = ",".join( - ["r * {} as col_{}".format(n, n) for n in range(1, 10000)] - ) - sql = """ - CREATE TABLE {}.{} - AS - SELECT - r as rowval, - {} - FROM - UNNEST(GENERATE_ARRAY(1,{},1)) as r - """.format( - dsname, table_id, colprojections, rowcount - ) - query_job = Config.CLIENT.query(sql) - query_job.result() - self.assertEqual(query_job.statement_type, "CREATE_TABLE_AS_SELECT") - self.assertEqual(query_job.ddl_operation_performed, "CREATE") - self.assertEqual(query_job.ddl_target_table, table_ref) - - return table_ref - - def test_query_many_columns(self): - # Test working with the widest schema BigQuery supports, 10k columns. - row_count = 2 - table_ref = self._create_table_many_columns(row_count) - rows = list( - Config.CLIENT.query( - "SELECT * FROM `{}.{}`".format(table_ref.dataset_id, table_ref.table_id) - ) - ) - - self.assertEqual(len(rows), row_count) - - # check field representations adhere to expected values. 
- correctwidth = 0 - badvals = 0 - for r in rows: - vals = r._xxx_values - rowval = vals[0] - if len(vals) == 10000: - correctwidth = correctwidth + 1 - for n in range(1, 10000): - if vals[n] != rowval * (n): - badvals = badvals + 1 - self.assertEqual(correctwidth, row_count) - self.assertEqual(badvals, 0) - - def test_insert_rows_then_dump_table(self): - NOW_SECONDS = 1448911495.484366 - NOW = datetime.datetime.utcfromtimestamp(NOW_SECONDS).replace(tzinfo=UTC) - ROWS = [ - ("Phred Phlyntstone", 32, NOW), - ("Bharney Rhubble", 33, NOW + datetime.timedelta(seconds=10)), - ("Wylma Phlyntstone", 29, NOW + datetime.timedelta(seconds=20)), - ("Bhettye Rhubble", 27, None), - ] - ROW_IDS = range(len(ROWS)) - - dataset = self.temp_dataset(_make_dataset_id("insert_rows_then_dump")) - TABLE_ID = "test_table" - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("now", "TIMESTAMP"), - ] - table_arg = Table(dataset.table(TABLE_ID), schema=schema) - self.assertFalse(_table_exists(table_arg)) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - self.assertTrue(_table_exists(table)) - - errors = Config.CLIENT.insert_rows(table, ROWS, row_ids=ROW_IDS) - self.assertEqual(len(errors), 0) - - rows = () - - # Allow for "warm up" before rows visible. See - # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability - # 8 tries -> 1 + 2 + 4 + 8 + 16 + 32 + 64 = 127 seconds - retry = RetryResult(_has_rows, max_tries=8) - rows = retry(self._fetch_single_page)(table) - row_tuples = [r.values() for r in rows] - by_age = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_age), sorted(ROWS, key=by_age)) - - def test_load_table_from_local_avro_file_then_dump_table(self): - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - TABLE_NAME = "test_table_avro" - ROWS = [ - ("violet", 400), - ("indigo", 445), - ("blue", 475), - ("green", 510), - ("yellow", 570), - ("orange", 590), - ("red", 650), - ] - - dataset = self.temp_dataset(_make_dataset_id("load_local_then_dump")) - table_ref = dataset.table(TABLE_NAME) - table = Table(table_ref) - self.to_delete.insert(0, table) - - with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as avrof: - config = bigquery.LoadJobConfig() - config.source_format = SourceFormat.AVRO - config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job = Config.CLIENT.load_table_from_file( - avrof, table_ref, job_config=config - ) - # Retry until done. - job.result(timeout=JOB_TIMEOUT) - - self.assertEqual(job.output_rows, len(ROWS)) - - table = Config.CLIENT.get_table(table) - rows = self._fetch_single_page(table) - row_tuples = [r.values() for r in rows] - by_wavelength = operator.itemgetter(1) - self.assertEqual( - sorted(row_tuples, key=by_wavelength), sorted(ROWS, key=by_wavelength) - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_automatic_schema(self): - """Test that a DataFrame with dtypes that map well to BigQuery types - can be uploaded without specifying a schema. 
- - https://github.com/googleapis/google-cloud-python/issues/9044 - """ - df_data = collections.OrderedDict( - [ - ("bool_col", pandas.Series([True, False, True], dtype="bool")), - ( - "ts_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ).dt.tz_localize(pytz.utc), - ), - ( - "dt_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ), - ), - ("float32_col", pandas.Series([1.0, 2.0, 3.0], dtype="float32")), - ("float64_col", pandas.Series([4.0, 5.0, 6.0], dtype="float64")), - ("int8_col", pandas.Series([-12, -11, -10], dtype="int8")), - ("int16_col", pandas.Series([-9, -8, -7], dtype="int16")), - ("int32_col", pandas.Series([-6, -5, -4], dtype="int32")), - ("int64_col", pandas.Series([-3, -2, -1], dtype="int64")), - ("uint8_col", pandas.Series([0, 1, 2], dtype="uint8")), - ("uint16_col", pandas.Series([3, 4, 5], dtype="uint16")), - ("uint32_col", pandas.Series([6, 7, 8], dtype="uint32")), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - - dataset_id = _make_dataset_id("bq_load_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_dataframe_w_automatic_schema".format( - Config.CLIENT.project, dataset_id - ) - - load_job = Config.CLIENT.load_table_from_dataframe(dataframe, table_id) - load_job.result() - - table = Config.CLIENT.get_table(table_id) - self.assertEqual( - tuple(table.schema), - ( - bigquery.SchemaField("bool_col", "BOOLEAN"), - bigquery.SchemaField("ts_col", "TIMESTAMP"), - # BigQuery does not support uploading DATETIME values from - # Parquet files. See: - # https://github.com/googleapis/google-cloud-python/issues/9996 - bigquery.SchemaField("dt_col", "TIMESTAMP"), - bigquery.SchemaField("float32_col", "FLOAT"), - bigquery.SchemaField("float64_col", "FLOAT"), - bigquery.SchemaField("int8_col", "INTEGER"), - bigquery.SchemaField("int16_col", "INTEGER"), - bigquery.SchemaField("int32_col", "INTEGER"), - bigquery.SchemaField("int64_col", "INTEGER"), - bigquery.SchemaField("uint8_col", "INTEGER"), - bigquery.SchemaField("uint16_col", "INTEGER"), - bigquery.SchemaField("uint32_col", "INTEGER"), - ), - ) - self.assertEqual(table.num_rows, 3) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_nulls(self): - """Test that a DataFrame with null columns can be uploaded if a - BigQuery schema is specified. - - See: https://github.com/googleapis/google-cloud-python/issues/7370 - """ - # Schema with all scalar types. - scalars_schema = ( - bigquery.SchemaField("bool_col", "BOOLEAN"), - bigquery.SchemaField("bytes_col", "BYTES"), - bigquery.SchemaField("date_col", "DATE"), - bigquery.SchemaField("dt_col", "DATETIME"), - bigquery.SchemaField("float_col", "FLOAT"), - bigquery.SchemaField("geo_col", "GEOGRAPHY"), - bigquery.SchemaField("int_col", "INTEGER"), - bigquery.SchemaField("num_col", "NUMERIC"), - bigquery.SchemaField("str_col", "STRING"), - bigquery.SchemaField("time_col", "TIME"), - bigquery.SchemaField("ts_col", "TIMESTAMP"), - ) - table_schema = scalars_schema + ( - # TODO: Array columns can't be read due to NULLABLE versus REPEATED - # mode mismatch. 
See: - # https://issuetracker.google.com/133415569#comment3 - # bigquery.SchemaField("array_col", "INTEGER", mode="REPEATED"), - # TODO: Support writing StructArrays to Parquet. See: - # https://jira.apache.org/jira/browse/ARROW-2587 - # bigquery.SchemaField("struct_col", "RECORD", fields=scalars_schema), - ) - num_rows = 100 - nulls = [None] * num_rows - df_data = collections.OrderedDict( - [ - ("bool_col", nulls), - ("bytes_col", nulls), - ("date_col", nulls), - ("dt_col", nulls), - ("float_col", nulls), - ("geo_col", nulls), - ("int_col", nulls), - ("num_col", nulls), - ("str_col", nulls), - ("time_col", nulls), - ("ts_col", nulls), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - - dataset_id = _make_dataset_id("bq_load_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_dataframe_w_nulls".format( - Config.CLIENT.project, dataset_id - ) - - # Create the table before loading so that schema mismatch errors are - # identified. - table = retry_403(Config.CLIENT.create_table)( - Table(table_id, schema=table_schema) - ) - self.to_delete.insert(0, table) - - job_config = bigquery.LoadJobConfig(schema=table_schema) - load_job = Config.CLIENT.load_table_from_dataframe( - dataframe, table_id, job_config=job_config - ) - load_job.result() - - table = Config.CLIENT.get_table(table) - self.assertEqual(tuple(table.schema), table_schema) - self.assertEqual(table.num_rows, num_rows) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_required(self): - """Test that a DataFrame with required columns can be uploaded if a - BigQuery schema is specified. - - See: https://github.com/googleapis/google-cloud-python/issues/8093 - """ - table_schema = ( - bigquery.SchemaField("name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ) - - records = [{"name": "Chip", "age": 2}, {"name": "Dale", "age": 3}] - dataframe = pandas.DataFrame(records, columns=["name", "age"]) - job_config = bigquery.LoadJobConfig(schema=table_schema) - dataset_id = _make_dataset_id("bq_load_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_dataframe_w_required".format( - Config.CLIENT.project, dataset_id - ) - - # Create the table before loading so that schema mismatch errors are - # identified. - table = retry_403(Config.CLIENT.create_table)( - Table(table_id, schema=table_schema) - ) - self.to_delete.insert(0, table) - - job_config = bigquery.LoadJobConfig(schema=table_schema) - load_job = Config.CLIENT.load_table_from_dataframe( - dataframe, table_id, job_config=job_config - ) - load_job.result() - - table = Config.CLIENT.get_table(table) - self.assertEqual(tuple(table.schema), table_schema) - self.assertEqual(table.num_rows, 2) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_explicit_schema(self): - # Schema with all scalar types. 
- scalars_schema = ( - bigquery.SchemaField("bool_col", "BOOLEAN"), - bigquery.SchemaField("bytes_col", "BYTES"), - bigquery.SchemaField("date_col", "DATE"), - bigquery.SchemaField("dt_col", "DATETIME"), - bigquery.SchemaField("float_col", "FLOAT"), - bigquery.SchemaField("geo_col", "GEOGRAPHY"), - bigquery.SchemaField("int_col", "INTEGER"), - bigquery.SchemaField("num_col", "NUMERIC"), - bigquery.SchemaField("str_col", "STRING"), - bigquery.SchemaField("time_col", "TIME"), - bigquery.SchemaField("ts_col", "TIMESTAMP"), - ) - table_schema = scalars_schema + ( - # TODO: Array columns can't be read due to NULLABLE versus REPEATED - # mode mismatch. See: - # https://issuetracker.google.com/133415569#comment3 - # bigquery.SchemaField("array_col", "INTEGER", mode="REPEATED"), - # TODO: Support writing StructArrays to Parquet. See: - # https://jira.apache.org/jira/browse/ARROW-2587 - # bigquery.SchemaField("struct_col", "RECORD", fields=scalars_schema), - ) - df_data = collections.OrderedDict( - [ - ("bool_col", [True, None, False]), - ("bytes_col", [b"abc", None, b"def"]), - ( - "date_col", - [datetime.date(1, 1, 1), None, datetime.date(9999, 12, 31)], - ), - ( - "dt_col", - [ - datetime.datetime(1, 1, 1, 0, 0, 0), - None, - datetime.datetime(9999, 12, 31, 23, 59, 59, 999999), - ], - ), - ("float_col", [float("-inf"), float("nan"), float("inf")]), - ( - "geo_col", - [ - "POINT(30 10)", - None, - "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", - ], - ), - ("int_col", [-9223372036854775808, None, 9223372036854775807]), - ( - "num_col", - [ - decimal.Decimal("-99999999999999999999999999999.999999999"), - None, - decimal.Decimal("99999999999999999999999999999.999999999"), - ], - ), - ("str_col", [u"abc", None, u"def"]), - ( - "time_col", - [datetime.time(0, 0, 0), None, datetime.time(23, 59, 59, 999999)], - ), - ( - "ts_col", - [ - datetime.datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.utc), - None, - datetime.datetime( - 9999, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.utc - ), - ], - ), - ] - ) - dataframe = pandas.DataFrame(df_data, dtype="object", columns=df_data.keys()) - - dataset_id = _make_dataset_id("bq_load_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_dataframe_w_explicit_schema".format( - Config.CLIENT.project, dataset_id - ) - - job_config = bigquery.LoadJobConfig(schema=table_schema) - load_job = Config.CLIENT.load_table_from_dataframe( - dataframe, table_id, job_config=job_config - ) - load_job.result() - - table = Config.CLIENT.get_table(table_id) - self.assertEqual(tuple(table.schema), table_schema) - self.assertEqual(table.num_rows, 3) - - def test_load_table_from_json_basic_use(self): - table_schema = ( - bigquery.SchemaField("name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("birthday", "DATE", mode="REQUIRED"), - bigquery.SchemaField("is_awesome", "BOOLEAN", mode="REQUIRED"), - ) - - json_rows = [ - {"name": "John", "age": 18, "birthday": "2001-10-15", "is_awesome": False}, - {"name": "Chuck", "age": 79, "birthday": "1940-03-10", "is_awesome": True}, - ] - - dataset_id = _make_dataset_id("bq_system_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_json_basic_use".format( - Config.CLIENT.project, dataset_id - ) - - # Create the table before loading so that schema mismatch errors are - # identified. 
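- # (Pre-creating the table also keeps the resulting column order predictable.)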
- table = retry_403(Config.CLIENT.create_table)( - Table(table_id, schema=table_schema) - ) - self.to_delete.insert(0, table) - - job_config = bigquery.LoadJobConfig(schema=table_schema) - load_job = Config.CLIENT.load_table_from_json( - json_rows, table_id, job_config=job_config - ) - load_job.result() - - table = Config.CLIENT.get_table(table) - self.assertEqual(tuple(table.schema), table_schema) - self.assertEqual(table.num_rows, 2) - - def test_load_table_from_json_schema_autodetect(self): - json_rows = [ - {"name": "John", "age": 18, "birthday": "2001-10-15", "is_awesome": False}, - {"name": "Chuck", "age": 79, "birthday": "1940-03-10", "is_awesome": True}, - ] - - dataset_id = _make_dataset_id("bq_system_test") - self.temp_dataset(dataset_id) - table_id = "{}.{}.load_table_from_json_schema_autodetect".format( - Config.CLIENT.project, dataset_id - ) - - # Use schema with NULLABLE fields, because schema autodetection - # defaults to field mode NULLABLE. - table_schema = ( - bigquery.SchemaField("name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("birthday", "DATE", mode="NULLABLE"), - bigquery.SchemaField("is_awesome", "BOOLEAN", mode="NULLABLE"), - ) - # create the table before loading so that the column order is predictable - table = retry_403(Config.CLIENT.create_table)( - Table(table_id, schema=table_schema) - ) - self.to_delete.insert(0, table) - - # do not pass an explicit job config to trigger automatic schema detection - load_job = Config.CLIENT.load_table_from_json(json_rows, table_id) - load_job.result() - - table = Config.CLIENT.get_table(table) - self.assertEqual(tuple(table.schema), table_schema) - self.assertEqual(table.num_rows, 2) - - def test_load_avro_from_uri_then_dump_table(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - table_name = "test_table" - rows = [ - ("violet", 400), - ("indigo", 445), - ("blue", 475), - ("green", 510), - ("yellow", 570), - ("orange", 590), - ("red", 650), - ] - with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as f: - GS_URL = self._write_avro_to_storage( - "bq_load_test" + unique_resource_id(), "colors.avro", f - ) - - dataset = self.temp_dataset(_make_dataset_id("bq_load_test")) - table_arg = dataset.table(table_name) - table = retry_403(Config.CLIENT.create_table)(Table(table_arg)) - self.to_delete.insert(0, table) - - config = bigquery.LoadJobConfig() - config.create_disposition = CreateDisposition.CREATE_NEVER - config.source_format = SourceFormat.AVRO - config.write_disposition = WriteDisposition.WRITE_EMPTY - job = Config.CLIENT.load_table_from_uri(GS_URL, table_arg, job_config=config) - job.result(timeout=JOB_TIMEOUT) - self.assertEqual(job.output_rows, len(rows)) - - table = Config.CLIENT.get_table(table) - fetched = self._fetch_single_page(table) - row_tuples = [r.values() for r in fetched] - self.assertEqual( - sorted(row_tuples, key=lambda x: x[1]), sorted(rows, key=lambda x: x[1]) - ) - - def test_load_table_from_uri_then_dump_table(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - TABLE_ID = "test_table" - GS_URL = self._write_csv_to_storage( - "bq_load_test" + unique_resource_id(), "person_ages.csv", HEADER_ROW, ROWS - ) - - dataset = self.temp_dataset(_make_dataset_id("load_gcs_then_dump")) - -
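- # The destination table must exist up front: the job config below uses - # CreateDisposition.CREATE_NEVER, so the load would otherwise fail.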
table_arg = Table(dataset.table(TABLE_ID), schema=SCHEMA) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - config = bigquery.LoadJobConfig() - config.create_disposition = CreateDisposition.CREATE_NEVER - config.skip_leading_rows = 1 - config.source_format = SourceFormat.CSV - config.write_disposition = WriteDisposition.WRITE_EMPTY - job = Config.CLIENT.load_table_from_uri( - GS_URL, dataset.table(TABLE_ID), job_config=config - ) - - # Allow for 90 seconds of "warm up" before rows visible. See - # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability - # 8 tries -> 1 + 2 + 4 + 8 + 16 + 32 + 64 = 127 seconds - retry = RetryInstanceState(_job_done, max_tries=8) - retry(job.reload)() - - rows = self._fetch_single_page(table) - row_tuples = [r.values() for r in rows] - by_age = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_age), sorted(ROWS, key=by_age)) - - def test_load_table_from_file_w_explicit_location(self): - # Create a temporary bucket for extract files. - bucket_name = "bq_load_table_eu_extract_test" + unique_resource_id() - self._create_bucket(bucket_name, location="eu") - - # Create a temporary dataset & table in the EU. - table_bytes = six.BytesIO(b"a,3\nb,2\nc,1\n") - client = Config.CLIENT - dataset = self.temp_dataset(_make_dataset_id("eu_load_file"), location="EU") - table_ref = dataset.table("letters") - job_config = bigquery.LoadJobConfig() - job_config.skip_leading_rows = 0 - job_config.schema = [ - bigquery.SchemaField("letter", "STRING"), - bigquery.SchemaField("value", "INTEGER"), - ] - - # Load the file to an EU dataset with an EU load job. - load_job = client.load_table_from_file( - table_bytes, table_ref, location="EU", job_config=job_config - ) - load_job.result() - job_id = load_job.job_id - - # Can get the job from the EU. - load_job = client.get_job(job_id, location="EU") - self.assertEqual(job_id, load_job.job_id) - self.assertEqual("EU", load_job.location) - self.assertTrue(load_job.exists()) - - # Cannot get the job from the US. - with self.assertRaises(NotFound): - client.get_job(job_id, location="US") - - load_job_us = client.get_job(job_id) - load_job_us._properties["jobReference"]["location"] = "US" - self.assertFalse(load_job_us.exists()) - with self.assertRaises(NotFound): - load_job_us.reload() - - # Can cancel the job from the EU. - self.assertTrue(load_job.cancel()) - load_job = client.cancel_job(job_id, location="EU") - self.assertEqual(job_id, load_job.job_id) - self.assertEqual("EU", load_job.location) - - # Cannot cancel the job from the US. - with self.assertRaises(NotFound): - client.cancel_job(job_id, location="US") - with self.assertRaises(NotFound): - load_job_us.cancel() - - # Can list the table rows. - table = client.get_table(table_ref) - self.assertEqual(table.num_rows, 3) - rows = [(row.letter, row.value) for row in client.list_rows(table)] - self.assertEqual(list(sorted(rows)), [("a", 3), ("b", 2), ("c", 1)]) - - # Verify location behavior with queries - query_config = bigquery.QueryJobConfig() - query_config.dry_run = True - - query_string = "SELECT * FROM `{}.letters` LIMIT 1".format(dataset.dataset_id) - - eu_query = client.query(query_string, location="EU", job_config=query_config) - self.assertTrue(eu_query.done) - - # Cannot query from US. - with self.assertRaises(GoogleAPICallError): - list(client.query(query_string, location="US", job_config=query_config)) - - # Cannot copy from US. 
- with self.assertRaises(GoogleAPICallError): - client.copy_table( - table_ref, dataset.table("letters2_us"), location="US" - ).result() - - # Cannot extract from US. - with self.assertRaises(GoogleAPICallError): - client.extract_table( - table_ref, "gs://{}/letters-us.csv".format(bucket_name), location="US" - ).result() - - def _write_csv_to_storage(self, bucket_name, blob_name, header_row, data_rows): - from google.cloud._testing import _NamedTemporaryFile - - bucket = self._create_bucket(bucket_name) - blob = bucket.blob(blob_name) - - with _NamedTemporaryFile() as temp: - with open(temp.name, "w") as csv_write: - writer = csv.writer(csv_write) - writer.writerow(header_row) - writer.writerows(data_rows) - - with open(temp.name, "rb") as csv_read: - retry_storage_errors(blob.upload_from_file)( - csv_read, content_type="text/csv" - ) - - self.to_delete.insert(0, blob) - return "gs://{}/{}".format(bucket_name, blob_name) - - def _write_avro_to_storage(self, bucket_name, blob_name, avro_file): - bucket = self._create_bucket(bucket_name) - blob = bucket.blob(blob_name) - retry_storage_errors(blob.upload_from_file)( - avro_file, content_type="application/x-avro-binary" - ) - self.to_delete.insert(0, blob) - return "gs://{}/{}".format(bucket_name, blob_name) - - def _load_table_for_extract_table(self, bucket, blob_name, table, rows): - from google.cloud._testing import _NamedTemporaryFile - - blob = bucket.blob(blob_name) - with _NamedTemporaryFile() as temp: - with open(temp.name, "w") as csv_write: - writer = csv.writer(csv_write) - writer.writerow(HEADER_ROW) - writer.writerows(rows) - - with open(temp.name, "rb") as csv_read: - retry_storage_errors(blob.upload_from_file)( - csv_read, content_type="text/csv" - ) - - self.to_delete.insert(0, blob) - - dataset = self.temp_dataset(table.dataset_id) - table_ref = dataset.table(table.table_id) - config = bigquery.LoadJobConfig() - config.autodetect = True - gs_url = "gs://{}/{}".format(bucket.name, blob_name) - job = Config.CLIENT.load_table_from_uri(gs_url, table_ref, job_config=config) - # TODO(jba): do we need this retry now that we have job.result()? - # Allow for 90 seconds of "warm up" before rows visible. 
See - # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability - # 8 tries -> 1 + 2 + 4 + 8 + 16 + 32 + 64 = 127 seconds - retry = RetryInstanceState(_job_done, max_tries=8) - retry(job.reload)() - - def test_extract_table(self): - local_id = unique_resource_id() - bucket_name = "bq_extract_test" + local_id - source_blob_name = "person_ages.csv" - dataset_id = _make_dataset_id("load_gcs_then_extract") - table_id = "test_table" - table_ref = Config.CLIENT.dataset(dataset_id).table(table_id) - table = Table(table_ref) - self.to_delete.insert(0, table) - bucket = self._create_bucket(bucket_name) - self._load_table_for_extract_table(bucket, source_blob_name, table_ref, ROWS) - destination_blob_name = "person_ages_out.csv" - destination = bucket.blob(destination_blob_name) - destination_uri = "gs://{}/person_ages_out.csv".format(bucket_name) - - job = Config.CLIENT.extract_table(table_ref, destination_uri) - job.result(timeout=100) - - self.to_delete.insert(0, destination) - got_bytes = retry_storage_errors(destination.download_as_string)() - got = got_bytes.decode("utf-8") - self.assertIn("Bharney Rhubble", got) - - def test_copy_table(self): - # If we create a new table to copy from, the test won't work - # because the new rows will be stored in the streaming buffer, - # and copy jobs don't read the streaming buffer. - # We could wait for the streaming buffer to empty, but that could - # take minutes. Instead we copy a small public table. - source_dataset = DatasetReference("bigquery-public-data", "samples") - source_ref = source_dataset.table("shakespeare") - dest_dataset = self.temp_dataset(_make_dataset_id("copy_table")) - dest_ref = dest_dataset.table("destination_table") - job_config = bigquery.CopyJobConfig() - job = Config.CLIENT.copy_table(source_ref, dest_ref, job_config=job_config) - job.result() - - dest_table = Config.CLIENT.get_table(dest_ref) - self.to_delete.insert(0, dest_table) - # Just check that we got some rows. - got_rows = self._fetch_single_page(dest_table) - self.assertTrue(len(got_rows) > 0) - - def test_job_cancel(self): - DATASET_ID = _make_dataset_id("job_cancel") - JOB_ID_PREFIX = "fetch_" + DATASET_ID - TABLE_NAME = "test_table" - QUERY = "SELECT * FROM %s.%s" % (DATASET_ID, TABLE_NAME) - - dataset = self.temp_dataset(DATASET_ID) - - table_arg = Table(dataset.table(TABLE_NAME), schema=SCHEMA) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - job = Config.CLIENT.query(QUERY, job_id_prefix=JOB_ID_PREFIX) - job.cancel() - - retry = RetryInstanceState(_job_done, max_tries=8) - retry(job.reload)() - - # The `cancel` API doesn't leave any reliable traces on - # the status of the job resource, so we can't really assert for - # them here. The best we can do is note that the API call didn't - # raise an error, and that the job completed (in the `retry()` - # above).
- - def test_get_failed_job(self): - # issue 4246 - from google.api_core.exceptions import BadRequest - - JOB_ID = "invalid_{}".format(str(uuid.uuid4())) - QUERY = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);" - PARAM = bigquery.ScalarQueryParameter("ts_value", "TIMESTAMP", 1.4810976e9) - - job_config = bigquery.QueryJobConfig() - job_config.query_parameters = [PARAM] - - with self.assertRaises(BadRequest): - Config.CLIENT.query(QUERY, job_id=JOB_ID, job_config=job_config).result() - - job = Config.CLIENT.get_job(JOB_ID) - - with self.assertRaises(ValueError): - job.query_parameters - - def test_query_w_legacy_sql_types(self): - naive = datetime.datetime(2016, 12, 5, 12, 41, 9) - stamp = "%s %s" % (naive.date().isoformat(), naive.time().isoformat()) - zoned = naive.replace(tzinfo=UTC) - examples = [ - {"sql": "SELECT 1", "expected": 1}, - {"sql": "SELECT 1.3", "expected": 1.3}, - {"sql": "SELECT TRUE", "expected": True}, - {"sql": 'SELECT "ABC"', "expected": "ABC"}, - {"sql": 'SELECT CAST("foo" AS BYTES)', "expected": b"foo"}, - {"sql": 'SELECT CAST("%s" AS TIMESTAMP)' % (stamp,), "expected": zoned}, - ] - for example in examples: - job_config = bigquery.QueryJobConfig() - job_config.use_legacy_sql = True - rows = list(Config.CLIENT.query(example["sql"], job_config=job_config)) - self.assertEqual(len(rows), 1) - self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example["expected"]) - - def _generate_standard_sql_types_examples(self): - naive = datetime.datetime(2016, 12, 5, 12, 41, 9) - naive_microseconds = datetime.datetime(2016, 12, 5, 12, 41, 9, 250000) - stamp = "%s %s" % (naive.date().isoformat(), naive.time().isoformat()) - stamp_microseconds = stamp + ".250000" - zoned = naive.replace(tzinfo=UTC) - zoned_microseconds = naive_microseconds.replace(tzinfo=UTC) - numeric = decimal.Decimal("123456789.123456789") - return [ - {"sql": "SELECT 1", "expected": 1}, - {"sql": "SELECT 1.3", "expected": 1.3}, - {"sql": "SELECT TRUE", "expected": True}, - {"sql": 'SELECT "ABC"', "expected": "ABC"}, - {"sql": 'SELECT CAST("foo" AS BYTES)', "expected": b"foo"}, - {"sql": 'SELECT TIMESTAMP "%s"' % (stamp,), "expected": zoned}, - { - "sql": 'SELECT TIMESTAMP "%s"' % (stamp_microseconds,), - "expected": zoned_microseconds, - }, - {"sql": 'SELECT DATETIME(TIMESTAMP "%s")' % (stamp,), "expected": naive}, - { - "sql": 'SELECT DATETIME(TIMESTAMP "%s")' % (stamp_microseconds,), - "expected": naive_microseconds, - }, - {"sql": 'SELECT DATE(TIMESTAMP "%s")' % (stamp,), "expected": naive.date()}, - {"sql": 'SELECT TIME(TIMESTAMP "%s")' % (stamp,), "expected": naive.time()}, - {"sql": 'SELECT NUMERIC "%s"' % (numeric,), "expected": numeric}, - {"sql": "SELECT (1, 2)", "expected": {"_field_1": 1, "_field_2": 2}}, - { - "sql": "SELECT ((1, 2), (3, 4), 5)", - "expected": { - "_field_1": {"_field_1": 1, "_field_2": 2}, - "_field_2": {"_field_1": 3, "_field_2": 4}, - "_field_3": 5, - }, - }, - {"sql": "SELECT [1, 2, 3]", "expected": [1, 2, 3]}, - { - "sql": "SELECT ([1, 2], 3, [4, 5])", - "expected": {"_field_1": [1, 2], "_field_2": 3, "_field_3": [4, 5]}, - }, - { - "sql": "SELECT [(1, 2, 3), (4, 5, 6)]", - "expected": [ - {"_field_1": 1, "_field_2": 2, "_field_3": 3}, - {"_field_1": 4, "_field_2": 5, "_field_3": 6}, - ], - }, - { - "sql": "SELECT [([1, 2, 3], 4), ([5, 6], 7)]", - "expected": [ - {u"_field_1": [1, 2, 3], u"_field_2": 4}, - {u"_field_1": [5, 6], u"_field_2": 7}, - ], - }, - { - "sql": "SELECT ARRAY(SELECT STRUCT([1, 2]))", - "expected": [{u"_field_1": [1, 2]}], - }, - {"sql": 
"SELECT ST_GeogPoint(1, 2)", "expected": "POINT(1 2)"}, - ] - - def test_query_w_standard_sql_types(self): - examples = self._generate_standard_sql_types_examples() - for example in examples: - rows = list(Config.CLIENT.query(example["sql"])) - self.assertEqual(len(rows), 1) - self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example["expected"]) - - def test_query_w_failed_query(self): - from google.api_core.exceptions import BadRequest - - with self.assertRaises(BadRequest): - Config.CLIENT.query("invalid syntax;").result() - - def test_query_w_wrong_config(self): - from google.cloud.bigquery.job import LoadJobConfig - - good_query = "SELECT 1;" - rows = list(Config.CLIENT.query("SELECT 1;").result()) - assert rows[0][0] == 1 - - bad_config = LoadJobConfig() - bad_config.destination = Config.CLIENT.dataset("dset").table("tbl") - with self.assertRaises(Exception): - Config.CLIENT.query(good_query, job_config=bad_config).result() - - def test_query_w_timeout(self): - query_job = Config.CLIENT.query( - "SELECT * FROM `bigquery-public-data.github_repos.commits`;", - job_id_prefix="test_query_w_timeout_", - ) - - with self.assertRaises(concurrent.futures.TimeoutError): - # 1 second is much too short for this query. - query_job.result(timeout=1) - - def test_query_w_page_size(self): - page_size = 45 - query_job = Config.CLIENT.query( - "SELECT word FROM `bigquery-public-data.samples.shakespeare`;", - job_id_prefix="test_query_w_page_size_", - ) - iterator = query_job.result(page_size=page_size) - self.assertEqual(next(iterator.pages).num_items, page_size) - - def test_query_statistics(self): - """ - A system test to exercise some of the extended query statistics. - - Note: We construct a query that should need at least three stages by - specifying a JOIN query. Exact plan and stats are effectively - non-deterministic, so we're largely interested in confirming values - are present. - """ - - job_config = bigquery.QueryJobConfig() - job_config.use_query_cache = False - - query_job = Config.CLIENT.query( - """ - SELECT - COUNT(1) - FROM - ( - SELECT - year, - wban_number - FROM `bigquery-public-data.samples.gsod` - LIMIT 1000 - ) lside - INNER JOIN - ( - SELECT - year, - state - FROM `bigquery-public-data.samples.natality` - LIMIT 1000 - ) rside - ON - lside.year = rside.year - """, - location="US", - job_config=job_config, - ) - - # run the job to completion - query_job.result() - - # Assert top-level stats - self.assertFalse(query_job.cache_hit) - self.assertIsNotNone(query_job.destination) - self.assertTrue(query_job.done) - self.assertFalse(query_job.dry_run) - self.assertIsNone(query_job.num_dml_affected_rows) - self.assertEqual(query_job.priority, "INTERACTIVE") - self.assertGreater(query_job.total_bytes_billed, 1) - self.assertGreater(query_job.total_bytes_processed, 1) - self.assertEqual(query_job.statement_type, "SELECT") - self.assertGreater(query_job.slot_millis, 1) - - # Make assertions on the shape of the query plan. - plan = query_job.query_plan - self.assertGreaterEqual(len(plan), 3) - first_stage = plan[0] - self.assertIsNotNone(first_stage.start) - self.assertIsNotNone(first_stage.end) - self.assertIsNotNone(first_stage.entry_id) - self.assertIsNotNone(first_stage.name) - self.assertGreater(first_stage.parallel_inputs, 0) - self.assertGreater(first_stage.completed_parallel_inputs, 0) - self.assertGreater(first_stage.shuffle_output_bytes, 0) - self.assertEqual(first_stage.status, "COMPLETE") - - # Query plan is a digraph. 
Ensure it has inter-stage links, - # but not every stage has inputs. - stages_with_inputs = 0 - for entry in plan: - if len(entry.input_stages) > 0: - stages_with_inputs = stages_with_inputs + 1 - self.assertGreater(stages_with_inputs, 0) - self.assertGreater(len(plan), stages_with_inputs) - - def test_dbapi_w_standard_sql_types(self): - examples = self._generate_standard_sql_types_examples() - for example in examples: - Config.CURSOR.execute(example["sql"]) - self.assertEqual(Config.CURSOR.rowcount, 1) - row = Config.CURSOR.fetchone() - self.assertEqual(len(row), 1) - self.assertEqual(row[0], example["expected"]) - row = Config.CURSOR.fetchone() - self.assertIsNone(row) - - def test_dbapi_fetchall(self): - query = "SELECT * FROM UNNEST([(1, 2), (3, 4), (5, 6)])" - - for arraysize in range(1, 5): - Config.CURSOR.execute(query) - self.assertEqual(Config.CURSOR.rowcount, 3, "expected 3 rows") - Config.CURSOR.arraysize = arraysize - rows = Config.CURSOR.fetchall() - row_tuples = [r.values() for r in rows] - self.assertEqual(row_tuples, [(1, 2), (3, 4), (5, 6)]) - - def _load_table_for_dml(self, rows, dataset_id, table_id): - from google.cloud._testing import _NamedTemporaryFile - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - dataset = self.temp_dataset(dataset_id) - greeting = bigquery.SchemaField("greeting", "STRING", mode="NULLABLE") - table_ref = dataset.table(table_id) - table_arg = Table(table_ref, schema=[greeting]) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - with _NamedTemporaryFile() as temp: - with open(temp.name, "w") as csv_write: - writer = csv.writer(csv_write) - writer.writerow(("Greeting",)) - writer.writerows(rows) - - with open(temp.name, "rb") as csv_read: - config = bigquery.LoadJobConfig() - config.source_format = SourceFormat.CSV - config.skip_leading_rows = 1 - config.create_disposition = CreateDisposition.CREATE_NEVER - config.write_disposition = WriteDisposition.WRITE_EMPTY - job = Config.CLIENT.load_table_from_file( - csv_read, table_ref, job_config=config - ) - - # Retry until done. 
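- # result() blocks, polling the load job until it reaches the DONE state - # or the timeout elapses.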
- job.result(timeout=JOB_TIMEOUT) - self._fetch_single_page(table) - - def test_query_w_dml(self): - dataset_name = _make_dataset_id("dml_query") - table_name = "test_table" - self._load_table_for_dml([("Hello World",)], dataset_name, table_name) - query_template = """UPDATE {}.{} - SET greeting = 'Guten Tag' - WHERE greeting = 'Hello World' - """ - - query_job = Config.CLIENT.query( - query_template.format(dataset_name, table_name), - job_id_prefix="test_query_w_dml_", - ) - query_job.result() - - self.assertEqual(query_job.num_dml_affected_rows, 1) - - def test_dbapi_w_dml(self): - dataset_name = _make_dataset_id("dml_dbapi") - table_name = "test_table" - self._load_table_for_dml([("Hello World",)], dataset_name, table_name) - query_template = """UPDATE {}.{} - SET greeting = 'Guten Tag' - WHERE greeting = 'Hello World' - """ - - Config.CURSOR.execute( - query_template.format(dataset_name, table_name), - job_id="test_dbapi_w_dml_{}".format(str(uuid.uuid4())), - ) - self.assertEqual(Config.CURSOR.rowcount, 1) - self.assertIsNone(Config.CURSOR.fetchone()) - - def test_query_w_query_params(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import ArrayQueryParameter - from google.cloud.bigquery.query import ScalarQueryParameter - from google.cloud.bigquery.query import StructQueryParameter - - question = "What is the answer to life, the universe, and everything?" - question_param = ScalarQueryParameter( - name="question", type_="STRING", value=question - ) - answer = 42 - answer_param = ScalarQueryParameter(name="answer", type_="INT64", value=answer) - pi = 3.1415926 - pi_param = ScalarQueryParameter(name="pi", type_="FLOAT64", value=pi) - pi_numeric = decimal.Decimal("3.141592654") - pi_numeric_param = ScalarQueryParameter( - name="pi_numeric_param", type_="NUMERIC", value=pi_numeric - ) - truthy = True - truthy_param = ScalarQueryParameter(name="truthy", type_="BOOL", value=truthy) - beef = b"DEADBEEF" - beef_param = ScalarQueryParameter(name="beef", type_="BYTES", value=beef) - naive = datetime.datetime(2016, 12, 5, 12, 41, 9) - naive_param = ScalarQueryParameter(name="naive", type_="DATETIME", value=naive) - naive_date_param = ScalarQueryParameter( - name="naive_date", type_="DATE", value=naive.date() - ) - naive_time_param = ScalarQueryParameter( - name="naive_time", type_="TIME", value=naive.time() - ) - zoned = naive.replace(tzinfo=UTC) - zoned_param = ScalarQueryParameter(name="zoned", type_="TIMESTAMP", value=zoned) - array_param = ArrayQueryParameter( - name="array_param", array_type="INT64", values=[1, 2] - ) - struct_param = StructQueryParameter("hitchhiker", question_param, answer_param) - phred_name = "Phred Phlyntstone" - phred_name_param = ScalarQueryParameter( - name="name", type_="STRING", value=phred_name - ) - phred_age = 32 - phred_age_param = ScalarQueryParameter( - name="age", type_="INT64", value=phred_age - ) - phred_param = StructQueryParameter(None, phred_name_param, phred_age_param) - bharney_name = "Bharney Rhubbyl" - bharney_name_param = ScalarQueryParameter( - name="name", type_="STRING", value=bharney_name - ) - bharney_age = 31 - bharney_age_param = ScalarQueryParameter( - name="age", type_="INT64", value=bharney_age - ) - bharney_param = StructQueryParameter( - None, bharney_name_param, bharney_age_param - ) - characters_param = ArrayQueryParameter( - name=None, array_type="RECORD", values=[phred_param, bharney_param] - ) - hero_param = StructQueryParameter("hero", phred_name_param, phred_age_param) - 
sidekick_param = StructQueryParameter( - "sidekick", bharney_name_param, bharney_age_param - ) - roles_param = StructQueryParameter("roles", hero_param, sidekick_param) - friends_param = ArrayQueryParameter( - name="friends", array_type="STRING", values=[phred_name, bharney_name] - ) - with_friends_param = StructQueryParameter(None, friends_param) - top_left_param = StructQueryParameter( - "top_left", - ScalarQueryParameter("x", "INT64", 12), - ScalarQueryParameter("y", "INT64", 102), - ) - bottom_right_param = StructQueryParameter( - "bottom_right", - ScalarQueryParameter("x", "INT64", 22), - ScalarQueryParameter("y", "INT64", 92), - ) - rectangle_param = StructQueryParameter( - "rectangle", top_left_param, bottom_right_param - ) - examples = [ - { - "sql": "SELECT @question", - "expected": question, - "query_parameters": [question_param], - }, - { - "sql": "SELECT @answer", - "expected": answer, - "query_parameters": [answer_param], - }, - {"sql": "SELECT @pi", "expected": pi, "query_parameters": [pi_param]}, - { - "sql": "SELECT @pi_numeric_param", - "expected": pi_numeric, - "query_parameters": [pi_numeric_param], - }, - { - "sql": "SELECT @truthy", - "expected": truthy, - "query_parameters": [truthy_param], - }, - {"sql": "SELECT @beef", "expected": beef, "query_parameters": [beef_param]}, - { - "sql": "SELECT @naive", - "expected": naive, - "query_parameters": [naive_param], - }, - { - "sql": "SELECT @naive_date", - "expected": naive.date(), - "query_parameters": [naive_date_param], - }, - { - "sql": "SELECT @naive_time", - "expected": naive.time(), - "query_parameters": [naive_time_param], - }, - { - "sql": "SELECT @zoned", - "expected": zoned, - "query_parameters": [zoned_param], - }, - { - "sql": "SELECT @array_param", - "expected": [1, 2], - "query_parameters": [array_param], - }, - { - "sql": "SELECT (@hitchhiker.question, @hitchhiker.answer)", - "expected": ({"_field_1": question, "_field_2": answer}), - "query_parameters": [struct_param], - }, - { - "sql": "SELECT " - "((@rectangle.bottom_right.x - @rectangle.top_left.x) " - "* (@rectangle.top_left.y - @rectangle.bottom_right.y))", - "expected": 100, - "query_parameters": [rectangle_param], - }, - { - "sql": "SELECT ?", - "expected": [ - {"name": phred_name, "age": phred_age}, - {"name": bharney_name, "age": bharney_age}, - ], - "query_parameters": [characters_param], - }, - { - "sql": "SELECT @roles", - "expected": { - "hero": {"name": phred_name, "age": phred_age}, - "sidekick": {"name": bharney_name, "age": bharney_age}, - }, - "query_parameters": [roles_param], - }, - { - "sql": "SELECT ?", - "expected": {"friends": [phred_name, bharney_name]}, - "query_parameters": [with_friends_param], - }, - ] - for example in examples: - jconfig = QueryJobConfig() - jconfig.query_parameters = example["query_parameters"] - query_job = Config.CLIENT.query( - example["sql"], - job_config=jconfig, - job_id_prefix="test_query_w_query_params", - ) - rows = list(query_job.result()) - self.assertEqual(len(rows), 1) - self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example["expected"]) - - def test_dbapi_w_query_parameters(self): - examples = [ - { - "sql": "SELECT %(boolval)s", - "expected": True, - "query_parameters": {"boolval": True}, - }, - { - "sql": 'SELECT %(a "very" weird `name`)s', - "expected": True, - "query_parameters": {'a "very" weird `name`': True}, - }, - { - "sql": "SELECT %(select)s", - "expected": True, - "query_parameters": {"select": True}, # this name is a keyword - }, - {"sql": "SELECT %s", "expected": 
False, "query_parameters": [False]}, - { - "sql": "SELECT %(intval)s", - "expected": 123, - "query_parameters": {"intval": 123}, - }, - { - "sql": "SELECT %s", - "expected": -123456789, - "query_parameters": [-123456789], - }, - { - "sql": "SELECT %(floatval)s", - "expected": 1.25, - "query_parameters": {"floatval": 1.25}, - }, - { - "sql": "SELECT LOWER(%(strval)s)", - "query_parameters": {"strval": "I Am A String"}, - "expected": "i am a string", - }, - { - "sql": "SELECT DATE_SUB(%(dateval)s, INTERVAL 1 DAY)", - "query_parameters": {"dateval": datetime.date(2017, 4, 2)}, - "expected": datetime.date(2017, 4, 1), - }, - { - "sql": "SELECT TIME_ADD(%(timeval)s, INTERVAL 4 SECOND)", - "query_parameters": {"timeval": datetime.time(12, 34, 56)}, - "expected": datetime.time(12, 35, 0), - }, - { - "sql": ("SELECT DATETIME_ADD(%(datetimeval)s, INTERVAL 53 SECOND)"), - "query_parameters": { - "datetimeval": datetime.datetime(2012, 3, 4, 5, 6, 7) - }, - "expected": datetime.datetime(2012, 3, 4, 5, 7, 0), - }, - { - "sql": "SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)", - "query_parameters": { - "zoned": datetime.datetime(2012, 3, 4, 5, 6, 7, tzinfo=UTC) - }, - "expected": datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), - }, - { - "sql": "SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)", - "query_parameters": { - "zoned": datetime.datetime(2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC) - }, - "expected": datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), - }, - ] - for example in examples: - msg = "sql: {} query_parameters: {}".format( - example["sql"], example["query_parameters"] - ) - - Config.CURSOR.execute(example["sql"], example["query_parameters"]) - - self.assertEqual(Config.CURSOR.rowcount, 1, msg=msg) - row = Config.CURSOR.fetchone() - self.assertEqual(len(row), 1, msg=msg) - self.assertEqual(row[0], example["expected"], msg=msg) - row = Config.CURSOR.fetchone() - self.assertIsNone(row, msg=msg) - - def test_large_query_w_public_data(self): - PUBLIC = "bigquery-public-data" - DATASET_ID = "samples" - TABLE_NAME = "natality" - LIMIT = 1000 - SQL = "SELECT * from `{}.{}.{}` LIMIT {}".format( - PUBLIC, DATASET_ID, TABLE_NAME, LIMIT - ) - - query_job = Config.CLIENT.query(SQL) - - rows = list(query_job) - self.assertEqual(len(rows), LIMIT) - - def test_query_future(self): - query_job = Config.CLIENT.query("SELECT 1") - iterator = query_job.result(timeout=JOB_TIMEOUT) - row_tuples = [r.values() for r in iterator] - self.assertEqual(row_tuples, [(1,)]) - - def test_query_iter(self): - import types - - query_job = Config.CLIENT.query("SELECT 1") - self.assertIsInstance(iter(query_job), types.GeneratorType) - row_tuples = [r.values() for r in query_job] - self.assertEqual(row_tuples, [(1,)]) - - def test_querying_data_w_timeout(self): - job_config = bigquery.QueryJobConfig() - job_config.use_query_cache = False - - query_job = Config.CLIENT.query( - """ - SELECT name, SUM(number) AS total_people - FROM `bigquery-public-data.usa_names.usa_1910_current` - GROUP BY name - """, - location="US", - job_config=job_config, - ) - - # Specify a very tight deadline to demonstrate that the timeout - # actually has effect. - with self.assertRaises(requests.exceptions.Timeout): - query_job.done(timeout=0.1) - - # Now wait for the result using a more realistic deadline. 
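- # Unlike done(), result() also waits for and fetches the query rows.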
- query_job.result(timeout=30) - self.assertTrue(query_job.done(timeout=30)) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_query_results_to_dataframe(self): - QUERY = """ - SELECT id, author, time_ts, dead - FROM `bigquery-public-data.hacker_news.comments` - LIMIT 10 - """ - - df = Config.CLIENT.query(QUERY).result().to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 10) # verify the number of rows - column_names = ["id", "author", "time_ts", "dead"] - self.assertEqual(list(df), column_names) # verify the column names - exp_datatypes = { - "id": int, - "author": six.text_type, - "time_ts": pandas.Timestamp, - "dead": bool, - } - for index, row in df.iterrows(): - for col in column_names: - # all the schema fields are nullable, so None is acceptable - if not row[col] is None: - self.assertIsInstance(row[col], exp_datatypes[col]) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_query_results_to_dataframe_w_bqstorage(self): - dest_dataset = self.temp_dataset(_make_dataset_id("bqstorage_to_dataframe_")) - dest_ref = dest_dataset.table("query_results") - - query = """ - SELECT id, author, time_ts, dead - FROM `bigquery-public-data.hacker_news.comments` - LIMIT 10 - """ - - bqstorage_client = bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=Config.CLIENT._credentials - ) - - job_configs = ( - # There is a known issue reading small anonymous query result - # tables with the BQ Storage API. Writing to a destination - # table works around this issue. - bigquery.QueryJobConfig( - destination=dest_ref, write_disposition="WRITE_TRUNCATE" - ), - # Check that the client is able to work around the issue with - # reading small anonymous query result tables by falling back to - # the tabledata.list API. 
- None, - ) - - for job_config in job_configs: - df = ( - Config.CLIENT.query(query, job_config=job_config) - .result() - .to_dataframe(bqstorage_client) - ) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 10) # verify the number of rows - column_names = ["id", "author", "time_ts", "dead"] - self.assertEqual(list(df), column_names) - exp_datatypes = { - "id": int, - "author": six.text_type, - "time_ts": pandas.Timestamp, - "dead": bool, - } - for index, row in df.iterrows(): - for col in column_names: - # all the schema fields are nullable, so None is acceptable - if not row[col] is None: - self.assertIsInstance(row[col], exp_datatypes[col]) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_insert_rows_from_dataframe(self): - SF = bigquery.SchemaField - schema = [ - SF("float_col", "FLOAT", mode="REQUIRED"), - SF("int_col", "INTEGER", mode="REQUIRED"), - SF("bool_col", "BOOLEAN", mode="REQUIRED"), - SF("string_col", "STRING", mode="NULLABLE"), - ] - - dataframe = pandas.DataFrame( - [ - { - "float_col": 1.11, - "bool_col": True, - "string_col": "my string", - "int_col": 10, - }, - { - "float_col": 2.22, - "bool_col": False, - "string_col": "another string", - "int_col": 20, - }, - { - "float_col": 3.33, - "bool_col": False, - "string_col": "another string", - "int_col": 30, - }, - { - "float_col": 4.44, - "bool_col": True, - "string_col": "another string", - "int_col": 40, - }, - { - "float_col": 5.55, - "bool_col": False, - "string_col": "another string", - "int_col": 50, - }, - ] - ) - - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("issue_7553")) - table_arg = Table(dataset.table(table_id), schema=schema) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - Config.CLIENT.insert_rows_from_dataframe(table, dataframe, chunk_size=3) - - retry = RetryResult(_has_rows, max_tries=8) - rows = retry(self._fetch_single_page)(table) - - sorted_rows = sorted(rows, key=operator.attrgetter("int_col")) - row_tuples = [r.values() for r in sorted_rows] - expected = [tuple(data_row) for data_row in dataframe.itertuples(index=False)] - - assert len(row_tuples) == len(expected) - - for row, expected_row in zip(row_tuples, expected): - six.assertCountEqual( - self, row, expected_row - ) # column order does not matter - - def test_insert_rows_nested_nested(self): - # See #2951 - SF = bigquery.SchemaField - schema = [ - SF("string_col", "STRING", mode="NULLABLE"), - SF( - "record_col", - "RECORD", - mode="NULLABLE", - fields=[ - SF("nested_string", "STRING", mode="NULLABLE"), - SF("nested_repeated", "INTEGER", mode="REPEATED"), - SF( - "nested_record", - "RECORD", - mode="NULLABLE", - fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], - ), - ], - ), - ] - record = { - "nested_string": "another string value", - "nested_repeated": [0, 1, 2], - "nested_record": {"nested_nested_string": "some deep insight"}, - } - to_insert = [("Some value", record)] - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("issue_2951")) - table_arg = Table(dataset.table(table_id), schema=schema) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - Config.CLIENT.insert_rows(table, to_insert) - - retry = RetryResult(_has_rows, max_tries=8) - rows = retry(self._fetch_single_page)(table) - row_tuples = [r.values() for r in rows] - self.assertEqual(row_tuples, to_insert) - - def test_insert_rows_nested_nested_dictionary(self): - # See #2951 - SF = 
bigquery.SchemaField - schema = [ - SF("string_col", "STRING", mode="NULLABLE"), - SF( - "record_col", - "RECORD", - mode="NULLABLE", - fields=[ - SF("nested_string", "STRING", mode="NULLABLE"), - SF("nested_repeated", "INTEGER", mode="REPEATED"), - SF( - "nested_record", - "RECORD", - mode="NULLABLE", - fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], - ), - ], - ), - ] - record = { - "nested_string": "another string value", - "nested_repeated": [0, 1, 2], - "nested_record": {"nested_nested_string": "some deep insight"}, - } - to_insert = [{"string_col": "Some value", "record_col": record}] - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("issue_2951")) - table_arg = Table(dataset.table(table_id), schema=schema) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - - Config.CLIENT.insert_rows(table, to_insert) - - retry = RetryResult(_has_rows, max_tries=8) - rows = retry(self._fetch_single_page)(table) - row_tuples = [r.values() for r in rows] - expected_rows = [("Some value", record)] - self.assertEqual(row_tuples, expected_rows) - - def test_create_routine(self): - routine_name = "test_routine" - dataset = self.temp_dataset(_make_dataset_id("create_routine")) - float64_type = bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.FLOAT64 - ) - routine = bigquery.Routine( - dataset.routine(routine_name), - language="JAVASCRIPT", - type_="SCALAR_FUNCTION", - return_type=float64_type, - imported_libraries=[ - "gs://{}/bigquery/udfs/max-value.js".format(SAMPLES_BUCKET) - ], - ) - routine.arguments = [ - bigquery.RoutineArgument( - name="arr", - data_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.ARRAY, - array_element_type=float64_type, - ), - ) - ] - routine.body = "return maxValue(arr)" - query_string = "SELECT `{}`([-100.0, 3.14, 100.0, 42.0]) as max_value;".format( - str(routine.reference) - ) - - routine = retry_403(Config.CLIENT.create_routine)(routine) - query_job = retry_403(Config.CLIENT.query)(query_string) - rows = list(query_job.result()) - - assert len(rows) == 1 - assert rows[0].max_value == 100.0 - - def test_create_table_rows_fetch_nested_schema(self): - table_name = "test_table" - dataset = self.temp_dataset(_make_dataset_id("create_table_nested_schema")) - schema = _load_json_schema() - table_arg = Table(dataset.table(table_name), schema=schema) - table = retry_403(Config.CLIENT.create_table)(table_arg) - self.to_delete.insert(0, table) - self.assertTrue(_table_exists(table)) - self.assertEqual(table.table_id, table_name) - - to_insert = [] - # Data is in "JSON Lines" format, see http://jsonlines.org/ - json_filename = os.path.join(WHERE, "data", "characters.jsonl") - with open(json_filename) as rows_file: - for line in rows_file: - to_insert.append(json.loads(line)) - - errors = Config.CLIENT.insert_rows_json(table, to_insert) - self.assertEqual(len(errors), 0) - - retry = RetryResult(_has_rows, max_tries=8) - fetched = retry(self._fetch_single_page)(table) - fetched_tuples = [f.values() for f in fetched] - - self.assertEqual(len(fetched), len(to_insert)) - - for found, expected in zip(sorted(fetched_tuples), to_insert): - self.assertEqual(found[0], expected["Name"]) - self.assertEqual(found[1], int(expected["Age"])) - self.assertEqual(found[2], expected["Weight"]) - self.assertEqual(found[3], expected["IsMagic"]) - - self.assertEqual(len(found[4]), len(expected["Spells"])) - for f_spell, 
e_spell in zip(found[4], expected["Spells"]): - self.assertEqual(f_spell["Name"], e_spell["Name"]) - parts = time.strptime(e_spell["LastUsed"], "%Y-%m-%d %H:%M:%S UTC") - e_used = datetime.datetime(*parts[0:6], tzinfo=UTC) - self.assertEqual(f_spell["LastUsed"], e_used) - self.assertEqual(f_spell["DiscoveredBy"], e_spell["DiscoveredBy"]) - self.assertEqual(f_spell["Properties"], e_spell["Properties"]) - - e_icon = base64.standard_b64decode(e_spell["Icon"].encode("ascii")) - self.assertEqual(f_spell["Icon"], e_icon) - - parts = time.strptime(expected["TeaTime"], "%H:%M:%S") - e_teatime = datetime.time(*parts[3:6]) - self.assertEqual(found[5], e_teatime) - - parts = time.strptime(expected["NextVacation"], "%Y-%m-%d") - e_nextvac = datetime.date(*parts[0:3]) - self.assertEqual(found[6], e_nextvac) - - parts = time.strptime(expected["FavoriteTime"], "%Y-%m-%dT%H:%M:%S") - e_favtime = datetime.datetime(*parts[0:6]) - self.assertEqual(found[7], e_favtime) - self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"])) - - def _fetch_dataframe(self, query): - return Config.CLIENT.query(query).result().to_dataframe() - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_nested_table_to_arrow(self): - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - SF = bigquery.SchemaField - schema = [ - SF("string_col", "STRING", mode="NULLABLE"), - SF( - "record_col", - "RECORD", - mode="NULLABLE", - fields=[ - SF("nested_string", "STRING", mode="NULLABLE"), - SF("nested_repeated", "INTEGER", mode="REPEATED"), - ], - ), - SF("float_col", "FLOAT", mode="NULLABLE"), - ] - record = {"nested_string": "another string value", "nested_repeated": [0, 1, 2]} - to_insert = [ - {"string_col": "Some value", "record_col": record, "float_col": 3.14} - ] - rows = [json.dumps(row) for row in to_insert] - body = six.BytesIO("{}\n".format("\n".join(rows)).encode("ascii")) - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("nested_df")) - table = dataset.table(table_id) - self.to_delete.insert(0, table) - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON - job_config.schema = schema - # Load a table using a local JSON file from memory. - Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result() - bqstorage_client = bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=Config.CLIENT._credentials - ) - - tbl = Config.CLIENT.list_rows(table, selected_fields=schema).to_arrow( - bqstorage_client=bqstorage_client - ) - - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 1) - self.assertEqual(tbl.num_columns, 3) - # Columns may not appear in the requested order. 
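- # The assertions below rely on the standard type mapping: NULLABLE scalars - # arrive as pyarrow scalar types, RECORD as a struct, and the REPEATED - # INTEGER field as a list of int64 values.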
- self.assertTrue( - pyarrow.types.is_float64(tbl.schema.field_by_name("float_col").type) - ) - self.assertTrue( - pyarrow.types.is_string(tbl.schema.field_by_name("string_col").type) - ) - record_col = tbl.schema.field_by_name("record_col").type - self.assertTrue(pyarrow.types.is_struct(record_col)) - self.assertEqual(record_col.num_children, 2) - self.assertEqual(record_col[0].name, "nested_string") - self.assertTrue(pyarrow.types.is_string(record_col[0].type)) - self.assertEqual(record_col[1].name, "nested_repeated") - self.assertTrue(pyarrow.types.is_list(record_col[1].type)) - self.assertTrue(pyarrow.types.is_int64(record_col[1].type.value_type)) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_nested_table_to_dataframe(self): - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - SF = bigquery.SchemaField - schema = [ - SF("string_col", "STRING", mode="NULLABLE"), - SF( - "record_col", - "RECORD", - mode="NULLABLE", - fields=[ - SF("nested_string", "STRING", mode="NULLABLE"), - SF("nested_repeated", "INTEGER", mode="REPEATED"), - SF( - "nested_record", - "RECORD", - mode="NULLABLE", - fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], - ), - ], - ), - SF("bigfloat_col", "FLOAT", mode="NULLABLE"), - SF("smallfloat_col", "FLOAT", mode="NULLABLE"), - ] - record = { - "nested_string": "another string value", - "nested_repeated": [0, 1, 2], - "nested_record": {"nested_nested_string": "some deep insight"}, - } - to_insert = [ - { - "string_col": "Some value", - "record_col": record, - "bigfloat_col": 3.14, - "smallfloat_col": 2.72, - } - ] - rows = [json.dumps(row) for row in to_insert] - body = six.BytesIO("{}\n".format("\n".join(rows)).encode("ascii")) - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("nested_df")) - table = dataset.table(table_id) - self.to_delete.insert(0, table) - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON - job_config.schema = schema - # Load a table using a local JSON file from memory. - Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result() - - df = Config.CLIENT.list_rows(table, selected_fields=schema).to_dataframe( - dtypes={"smallfloat_col": "float16"} - ) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 1) # verify the number of rows - exp_columns = ["string_col", "record_col", "bigfloat_col", "smallfloat_col"] - self.assertEqual(list(df), exp_columns) # verify the column names - row = df.iloc[0] - # verify the row content - self.assertEqual(row["string_col"], "Some value") - expected_keys = tuple(sorted(record.keys())) - row_keys = tuple(sorted(row["record_col"].keys())) - self.assertEqual(row_keys, expected_keys) - # Can't compare numpy arrays, which pyarrow encodes the embedded - # repeated column to, so convert to list. 
- self.assertEqual(list(row["record_col"]["nested_repeated"]), [0, 1, 2]) - # verify that nested data can be accessed with indices/keys - self.assertEqual(row["record_col"]["nested_repeated"][0], 0) - self.assertEqual( - row["record_col"]["nested_record"]["nested_nested_string"], - "some deep insight", - ) - # verify dtypes - self.assertEqual(df.dtypes["bigfloat_col"].name, "float64") - self.assertEqual(df.dtypes["smallfloat_col"].name, "float16") - - def test_list_rows_empty_table(self): - from google.cloud.bigquery.table import RowIterator - - dataset_id = _make_dataset_id("empty_table") - dataset = self.temp_dataset(dataset_id) - table_ref = dataset.table("empty_table") - table = Config.CLIENT.create_table(bigquery.Table(table_ref)) - - # It's a bit silly to list rows for an empty table, but this does - # happen as the result of a DDL query from an IPython magic command. - rows = Config.CLIENT.list_rows(table) - self.assertIsInstance(rows, RowIterator) - self.assertEqual(tuple(rows), ()) - - def test_list_rows_page_size(self): - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.job import WriteDisposition - - num_items = 7 - page_size = 3 - num_pages, num_last_page = divmod(num_items, page_size) - - SF = bigquery.SchemaField - schema = [SF("string_col", "STRING", mode="NULLABLE")] - to_insert = [{"string_col": "item%d" % i} for i in range(num_items)] - rows = [json.dumps(row) for row in to_insert] - body = six.BytesIO("{}\n".format("\n".join(rows)).encode("ascii")) - - table_id = "test_table" - dataset = self.temp_dataset(_make_dataset_id("nested_df")) - table = dataset.table(table_id) - self.to_delete.insert(0, table) - job_config = bigquery.LoadJobConfig() - job_config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON - job_config.schema = schema - # Load a table using a local JSON file from memory. 
- Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result() - - df = Config.CLIENT.list_rows(table, selected_fields=schema, page_size=page_size) - pages = df.pages - - for i in range(num_pages): - page = next(pages) - self.assertEqual(page.num_items, page_size) - page = next(pages) - self.assertEqual(page.num_items, num_last_page) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_list_rows_max_results_w_bqstorage(self): - table_ref = DatasetReference("bigquery-public-data", "utility_us").table( - "country_code_iso" - ) - bqstorage_client = bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=Config.CLIENT._credentials - ) - - row_iterator = Config.CLIENT.list_rows( - table_ref, - selected_fields=[bigquery.SchemaField("country_name", "STRING")], - max_results=100, - ) - dataframe = row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - self.assertEqual(len(dataframe.index), 100) - - def temp_dataset(self, dataset_id, location=None): - dataset = Dataset(Config.CLIENT.dataset(dataset_id)) - if location: - dataset.location = location - dataset = retry_403(Config.CLIENT.create_dataset)(dataset) - self.to_delete.append(dataset) - return dataset - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(IPython is None, reason="Requires `ipython`") -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic(): - ip = IPython.get_ipython() - current_process = psutil.Process() - conn_count_start = len(current_process.connections()) - - ip.extension_manager.load_extension("google.cloud.bigquery") - sql = """ - SELECT - CONCAT( - 'https://stackoverflow.com/questions/', - CAST(id as STRING)) as url, - view_count - FROM `bigquery-public-data.stackoverflow.posts_questions` - WHERE tags like '%google-bigquery%' - ORDER BY view_count DESC - LIMIT 10 - """ - with io.capture_output() as captured: - result = ip.run_cell_magic("bigquery", "", sql) - - conn_count_end = len(current_process.connections()) - - lines = re.split("\n|\r", captured.stdout) - # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) - assert re.match("Executing query with job ID: .*", updates[0]) - assert all(re.match("Query executing: .*s", line) for line in updates[1:-1]) - assert re.match("Query complete after .*s", updates[-1]) - assert isinstance(result, pandas.DataFrame) - assert len(result) == 10 # verify row count - assert list(result) == ["url", "view_count"] # verify column names - assert conn_count_end == conn_count_start # system resources are released - - -def _job_done(instance): - return instance.state.lower() == "done" - - -def _dataset_exists(ds): - try: - Config.CLIENT.get_dataset(DatasetReference(ds.project, ds.dataset_id)) - return True - except NotFound: - return False - - -def _table_exists(t): - try: - tr = DatasetReference(t.project, t.dataset_id).table(t.table_id) - Config.CLIENT.get_table(tr) - return True - except NotFound: - return False - - -@pytest.fixture(scope="session") -def ipython(): - config = tools.default_config() - config.TerminalInteractiveShell.simple_prompt = True - shell = interactiveshell.TerminalInteractiveShell.instance(config=config) - return shell - - -@pytest.fixture() -def ipython_interactive(request, ipython): - """Activate IPython's builtin hooks - - for the duration of the test scope. 
- """ - with ipython.builtin_trap: - yield ipython diff --git a/bigquery/tests/unit/__init__.py b/bigquery/tests/unit/__init__.py deleted file mode 100644 index df379f1e9d88..000000000000 --- a/bigquery/tests/unit/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/bigquery/tests/unit/enums/__init__.py b/bigquery/tests/unit/enums/__init__.py deleted file mode 100644 index c5cce043083c..000000000000 --- a/bigquery/tests/unit/enums/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/bigquery/tests/unit/enums/test_standard_sql_data_types.py b/bigquery/tests/unit/enums/test_standard_sql_data_types.py deleted file mode 100644 index 6fa4f057fb98..000000000000 --- a/bigquery/tests/unit/enums/test_standard_sql_data_types.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-
-import pytest
-
-
-@pytest.fixture
-def module_under_test():
-    from google.cloud.bigquery import enums
-
-    return enums
-
-
-@pytest.fixture
-def enum_under_test():
-    from google.cloud.bigquery.enums import StandardSqlDataTypes
-
-    return StandardSqlDataTypes
-
-
-@pytest.fixture
-def gapic_enum():
-    """The referential autogenerated enum the enum under test is based on."""
-    from google.cloud.bigquery_v2.gapic.enums import StandardSqlDataType
-
-    return StandardSqlDataType.TypeKind
-
-
-def test_all_gapic_enum_members_are_known(module_under_test, gapic_enum):
-    gapic_names = set(type_.name for type_ in gapic_enum)
-    anticipated_names = (
-        module_under_test._SQL_SCALAR_TYPES | module_under_test._SQL_NONSCALAR_TYPES
-    )
-    assert not (gapic_names - anticipated_names)  # no unhandled names
-
-
-def test_standard_sql_types_enum_members(enum_under_test, gapic_enum):
-    # check the presence of a few typical SQL types
-    for name in ("INT64", "FLOAT64", "DATE", "BOOL", "GEOGRAPHY"):
-        assert name in enum_under_test.__members__
-
-    # the enum members must match those in the original gapic enum
-    for member in enum_under_test:
-        assert member.name in gapic_enum.__members__
-        assert member.value == gapic_enum[member.name].value
-
-    # check a few members that should *not* be copied over from the gapic enum
-    for name in ("STRUCT", "ARRAY"):
-        assert name in gapic_enum.__members__
-        assert name not in enum_under_test.__members__
-
-
-def test_standard_sql_types_enum_docstring(enum_under_test, gapic_enum):
-    assert "STRUCT (int):" not in enum_under_test.__doc__
-    assert "BOOL (int):" in enum_under_test.__doc__
-    assert "TIME (int):" in enum_under_test.__doc__
-
-    # All lines in the docstring should actually come from the original docstring,
-    # except for the header.
-    assert "An Enum of scalar SQL types." in enum_under_test.__doc__
-    doc_lines = enum_under_test.__doc__.splitlines()
-    assert set(doc_lines[1:]) <= set(gapic_enum.__doc__.splitlines())
diff --git a/bigquery/tests/unit/helpers.py b/bigquery/tests/unit/helpers.py
deleted file mode 100644
index 5b731a763a99..000000000000
--- a/bigquery/tests/unit/helpers.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
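[The make_connection helper removed below is the workhorse of these unit tests: it fabricates an autospec'd Connection whose api_request returns each queued response in order and then raises NotFound, so a test that issues one API call too many fails immediately. A usage sketch; the method and path arguments are illustrative, only the queuing behavior comes from the helper itself:

conn = make_connection({"jobReference": {"jobId": "1"}}, {"status": {"state": "DONE"}})
first = conn.api_request(method="POST", path="/projects/p/jobs")    # first queued response
second = conn.api_request(method="GET", path="/projects/p/jobs/1")  # second queued response
# Any further call raises google.cloud.exceptions.NotFound("miss").
]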
-
-
-def make_connection(*responses):
-    import google.cloud.bigquery._http
-    import mock
-    from google.cloud.exceptions import NotFound
-
-    mock_conn = mock.create_autospec(google.cloud.bigquery._http.Connection)
-    mock_conn.user_agent = "testing 1.2.3"
-    mock_conn.api_request.side_effect = list(responses) + [NotFound("miss")]
-    return mock_conn
diff --git a/bigquery/tests/unit/model/__init__.py b/bigquery/tests/unit/model/__init__.py
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/bigquery/tests/unit/model/test_model.py b/bigquery/tests/unit/model/test_model.py
deleted file mode 100644
index bbb93ef9e897..000000000000
--- a/bigquery/tests/unit/model/test_model.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-import pytest
-
-import google.cloud._helpers
-from google.cloud.bigquery_v2.gapic import enums
-
-KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1"
-
-
-@pytest.fixture
-def target_class():
-    from google.cloud.bigquery import Model
-
-    return Model
-
-
-@pytest.fixture
-def object_under_test(target_class):
-    return target_class("project-id.dataset_id.model_id")
-
-
-def test_ctor(target_class):
-    from google.cloud.bigquery import ModelReference
-
-    ref = ModelReference.from_string("my-proj.my_dset.my_model")
-    got = target_class(ref)
-    assert got.reference == ref
-
-
-def test_ctor_string(target_class):
-    from google.cloud.bigquery import ModelReference
-
-    model_id = "my-proj.my_dset.my_model"
-    ref = ModelReference.from_string(model_id)
-    got = target_class(model_id)
-    assert got.reference == ref
-
-
-def test_from_api_repr(target_class):
-    from google.cloud.bigquery import ModelReference
-
-    creation_time = datetime.datetime(
-        2010, 5, 19, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    modified_time = datetime.datetime(
-        2011, 10, 1, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    expiration_time = datetime.datetime(
-        2012, 12, 21, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    resource = {
-        "modelReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "modelId": "my_model",
-        },
-        "location": "US",
-        "etag": "abcdefg",
-        "creationTime": str(google.cloud._helpers._millis(creation_time)),
-        "lastModifiedTime": str(google.cloud._helpers._millis(modified_time)),
-        "expirationTime": str(google.cloud._helpers._millis(expiration_time)),
-        "description": "A friendly description.",
-        "friendlyName": "A friendly name.",
-        "modelType": "LOGISTIC_REGRESSION",
-        "labels": {"greeting": u"こんにちは"},
-        "trainingRuns": [
-            {
-                "trainingOptions": {"initialLearnRate": 1.0},
-                "startTime": str(
-                    google.cloud._helpers._datetime_to_rfc3339(creation_time)
-                ),
-            },
-            {
-                "trainingOptions": {"initialLearnRate": 0.5},
-                "startTime": str(
-                    google.cloud._helpers._datetime_to_rfc3339(modified_time)
-                ),
-            },
-            {
-                "trainingOptions": {"initialLearnRate": 0.25},
-                # Allow milliseconds since epoch format.
-                # TODO: Remove this hack once CL 238585470 hits prod.
-                "startTime": str(google.cloud._helpers._millis(expiration_time)),
-            },
-        ],
-        "featureColumns": [],
-        "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME},
-    }
-    got = target_class.from_api_repr(resource)
-
-    assert got.project == "my-project"
-    assert got.dataset_id == "my_dataset"
-    assert got.model_id == "my_model"
-    assert got.reference == ModelReference.from_string("my-project.my_dataset.my_model")
-    assert got.path == "/projects/my-project/datasets/my_dataset/models/my_model"
-    assert got.location == "US"
-    assert got.etag == "abcdefg"
-    assert got.created == creation_time
-    assert got.modified == modified_time
-    assert got.expires == expiration_time
-    assert got.description == u"A friendly description."
-    assert got.friendly_name == u"A friendly name."
-    assert got.model_type == enums.Model.ModelType.LOGISTIC_REGRESSION
-    assert got.labels == {"greeting": u"こんにちは"}
-    assert got.encryption_configuration.kms_key_name == KMS_KEY_NAME
-    assert got.training_runs[0].training_options.initial_learn_rate == 1.0
-    assert (
-        got.training_runs[0]
-        .start_time.ToDatetime()
-        .replace(tzinfo=google.cloud._helpers.UTC)
-        == creation_time
-    )
-    assert got.training_runs[1].training_options.initial_learn_rate == 0.5
-    assert (
-        got.training_runs[1]
-        .start_time.ToDatetime()
-        .replace(tzinfo=google.cloud._helpers.UTC)
-        == modified_time
-    )
-    assert got.training_runs[2].training_options.initial_learn_rate == 0.25
-    assert (
-        got.training_runs[2]
-        .start_time.ToDatetime()
-        .replace(tzinfo=google.cloud._helpers.UTC)
-        == expiration_time
-    )
-
-
-def test_from_api_repr_w_minimal_resource(target_class):
-    from google.cloud.bigquery import ModelReference
-
-    resource = {
-        "modelReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "modelId": "my_model",
-        }
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.reference == ModelReference.from_string("my-project.my_dataset.my_model")
-    assert got.location == ""
-    assert got.etag == ""
-    assert got.created is None
-    assert got.modified is None
-    assert got.expires is None
-    assert got.description is None
-    assert got.friendly_name is None
-    assert got.model_type == enums.Model.ModelType.MODEL_TYPE_UNSPECIFIED
-    assert got.labels == {}
-    assert got.encryption_configuration is None
-    assert len(got.training_runs) == 0
-    assert len(got.feature_columns) == 0
-    assert len(got.label_columns) == 0
-
-
-def test_from_api_repr_w_unknown_fields(target_class):
-    from google.cloud.bigquery import ModelReference
-
-    resource = {
-        "modelReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "modelId": "my_model",
-        },
-        "thisFieldIsNotInTheProto": "just ignore me",
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.reference == ModelReference.from_string("my-project.my_dataset.my_model")
-    assert got._properties is resource
-
-
-@pytest.mark.parametrize(
-    "resource,filter_fields,expected",
-    [
-        (
-            {
-                "friendlyName": "hello",
-                "description": "world",
-                "expirationTime": "12345",
-                "labels": {"a-label": "a-value"},
-            },
-            ["description"],
-            {"description": "world"},
-        ),
-        (
-            {"friendlyName": "hello", "description": "world"},
-            ["friendlyName"],
-            {"friendlyName": "hello"},
-        ),
-        (
-            {
-                "friendlyName": "hello",
-                "description": "world",
-                "expirationTime": "12345",
-                "labels": {"a-label": "a-value"},
-            },
-            ["expires"],
-            {"expirationTime": "12345"},
-        ),
-        (
-            {
-                "friendlyName": "hello",
-                "description": "world",
-                "expirationTime": None,
-                "labels": {"a-label": "a-value"},
-            },
-            ["expires"],
-            {"expirationTime": None},
-        ),
-        (
-            {
-                "friendlyName": "hello",
-                "description": "world",
-                "expirationTime": None,
-                "labels": {"a-label": "a-value"},
-            },
-            ["labels"],
-            {"labels": {"a-label": "a-value"}},
-        ),
-        (
-            {
-                "friendlyName": "hello",
-                "description": "world",
-                "expirationTime": None,
-                "labels": {"a-label": "a-value"},
-                "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME},
-            },
-            ["encryptionConfiguration"],
-            {"encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}},
-        ),
-    ],
-)
-def test_build_resource(object_under_test, resource, filter_fields, expected):
-    object_under_test._properties = resource
-    got = object_under_test._build_resource(filter_fields)
-    assert got == expected
-
-
-def test_set_description(object_under_test):
-    assert not object_under_test.description
-    object_under_test.description = "A model description."
-    assert object_under_test.description == "A model description."
-    object_under_test.description = None
-    assert not object_under_test.description
-
-
-def test_set_expires(object_under_test):
-    assert not object_under_test.expires
-    expiration_time = datetime.datetime(
-        2012, 12, 21, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    object_under_test.expires = expiration_time
-    assert object_under_test.expires == expiration_time
-    object_under_test.expires = None
-    assert not object_under_test.expires
-
-
-def test_set_friendly_name(object_under_test):
-    assert not object_under_test.friendly_name
-    object_under_test.friendly_name = "A model name."
-    assert object_under_test.friendly_name == "A model name."
-    object_under_test.friendly_name = None
-    assert not object_under_test.friendly_name
-
-
-def test_set_labels(object_under_test):
-    assert object_under_test.labels == {}
-    object_under_test.labels["data_owner"] = "someteam"
-    assert object_under_test.labels == {"data_owner": "someteam"}
-    del object_under_test.labels["data_owner"]
-    assert object_under_test.labels == {}
-
-
-def test_replace_labels(object_under_test):
-    assert object_under_test.labels == {}
-    object_under_test.labels = {"data_owner": "someteam"}
-    assert object_under_test.labels == {"data_owner": "someteam"}
-    labels = {}
-    object_under_test.labels = labels
-    assert object_under_test.labels is labels
-    object_under_test.labels = None
-    assert object_under_test.labels == {}
-
-
-def test_set_encryption_configuration(object_under_test):
-    from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration
-
-    assert not object_under_test.encryption_configuration
-    object_under_test.encryption_configuration = EncryptionConfiguration(
-        kms_key_name=KMS_KEY_NAME
-    )
-    assert object_under_test.encryption_configuration.kms_key_name == KMS_KEY_NAME
-    object_under_test.encryption_configuration = None
-    assert not object_under_test.encryption_configuration
-
-
-def test_repr(target_class):
-    model = target_class("my-proj.my_dset.my_model")
-    got = repr(model)
-    assert got == (
-        "Model(reference=ModelReference("
-        "project='my-proj', dataset_id='my_dset', project_id='my_model'))"
-    )
diff --git a/bigquery/tests/unit/model/test_model_reference.py b/bigquery/tests/unit/model/test_model_reference.py
deleted file mode 100644
index ff1d1df7d499..000000000000
--- a/bigquery/tests/unit/model/test_model_reference.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-
-@pytest.fixture
-def target_class():
-    from google.cloud.bigquery import ModelReference
-
-    return ModelReference
-
-
-def test_from_api_repr(target_class):
-    resource = {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "modelId": "my_model",
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.project == "my-project"
-    assert got.dataset_id == "my_dataset"
-    assert got.model_id == "my_model"
-    assert got.path == "/projects/my-project/datasets/my_dataset/models/my_model"
-
-
-def test_from_api_repr_w_unknown_fields(target_class):
-    resource = {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "modelId": "my_model",
-        "thisFieldIsNotInTheProto": "just ignore me",
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.project == "my-project"
-    assert got.dataset_id == "my_dataset"
-    assert got.model_id == "my_model"
-    assert got._properties is resource
-
-
-def test_to_api_repr(target_class):
-    ref = target_class.from_string("my-project.my_dataset.my_model")
-    got = ref.to_api_repr()
-    assert got == {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "modelId": "my_model",
-    }
-
-
-def test_from_string(target_class):
-    got = target_class.from_string("string-project.string_dataset.string_model")
-    assert got.project == "string-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.model_id == "string_model"
-    assert got.path == (
-        "/projects/string-project/datasets/string_dataset/models/string_model"
-    )
-
-
-def test_from_string_legacy_string(target_class):
-    with pytest.raises(ValueError):
-        target_class.from_string("string-project:string_dataset.string_model")
-
-
-def test_from_string_not_fully_qualified(target_class):
-    with pytest.raises(ValueError):
-        target_class.from_string("string_model")
-
-    with pytest.raises(ValueError):
-        target_class.from_string("string_dataset.string_model")
-
-    with pytest.raises(ValueError):
-        target_class.from_string("a.b.c.d")
-
-
-def test_from_string_with_default_project(target_class):
-    got = target_class.from_string(
-        "string_dataset.string_model", default_project="default-project"
-    )
-    assert got.project == "default-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.model_id == "string_model"
-
-
-def test_from_string_ignores_default_project(target_class):
-    got = target_class.from_string(
-        "string-project.string_dataset.string_model", default_project="default-project"
-    )
-    assert got.project == "string-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.model_id == "string_model"
-
-
-def test_eq(target_class):
-    model = target_class.from_string("my-proj.my_dset.my_model")
-    model_too = target_class.from_string("my-proj.my_dset.my_model")
-    assert model == model_too
-    assert not (model != model_too)
-
-    other_model = target_class.from_string("my-proj.my_dset.my_model2")
-    assert not (model == other_model)
-    assert model != other_model
-
-    notamodel = object()
-    assert not (model == notamodel)
-    assert model != notamodel
-
-
-def test_hash(target_class):
-    model = target_class.from_string("my-proj.my_dset.my_model")
-    model2 = target_class.from_string("my-proj.my_dset.model2")
-    got = {model: "hello", model2: "world"}
-    assert got[model] == "hello"
-    assert got[model2] == "world"
-
-    model_too = target_class.from_string("my-proj.my_dset.my_model")
-    assert got[model_too] == "hello"
-
-
-def test_repr(target_class):
-    model = target_class.from_string("my-proj.my_dset.my_model")
-    got = repr(model)
-    assert (
-        got
-        == "ModelReference(project='my-proj', dataset_id='my_dset', project_id='my_model')"
-    )
diff --git a/bigquery/tests/unit/routine/__init__.py b/bigquery/tests/unit/routine/__init__.py
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/bigquery/tests/unit/routine/test_routine.py b/bigquery/tests/unit/routine/test_routine.py
deleted file mode 100644
index 02f703535227..000000000000
--- a/bigquery/tests/unit/routine/test_routine.py
+++ /dev/null
@@ -1,335 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-import pytest
-
-import google.cloud._helpers
-from google.cloud import bigquery_v2
-
-
-@pytest.fixture
-def target_class():
-    from google.cloud.bigquery.routine import Routine
-
-    return Routine
-
-
-@pytest.fixture
-def object_under_test(target_class):
-    return target_class("project-id.dataset_id.routine_id")
-
-
-def test_ctor(target_class):
-    from google.cloud.bigquery.routine import RoutineReference
-
-    ref = RoutineReference.from_string("my-proj.my_dset.my_routine")
-    actual_routine = target_class(ref)
-    assert actual_routine.reference == ref
-    assert (
-        actual_routine.path == "/projects/my-proj/datasets/my_dset/routines/my_routine"
-    )
-
-
-def test_ctor_w_string(target_class):
-    from google.cloud.bigquery.routine import RoutineReference
-
-    routine_id = "my-proj.my_dset.my_routine"
-    ref = RoutineReference.from_string(routine_id)
-    actual_routine = target_class(routine_id)
-    assert actual_routine.reference == ref
-
-
-def test_ctor_w_properties(target_class):
-    from google.cloud.bigquery.routine import RoutineArgument
-    from google.cloud.bigquery.routine import RoutineReference
-
-    routine_id = "my-proj.my_dset.my_routine"
-    arguments = [
-        RoutineArgument(
-            name="x",
-            data_type=bigquery_v2.types.StandardSqlDataType(
-                type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64
-            ),
-        )
-    ]
-    body = "x * 3"
-    language = "SQL"
-    return_type = bigquery_v2.types.StandardSqlDataType(
-        type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64
-    )
-    type_ = "SCALAR_FUNCTION"
-    description = "A routine description."
-
-    actual_routine = target_class(
-        routine_id,
-        arguments=arguments,
-        body=body,
-        language=language,
-        return_type=return_type,
-        type_=type_,
-        description=description,
-    )
-
-    ref = RoutineReference.from_string(routine_id)
-    assert actual_routine.reference == ref
-    assert actual_routine.arguments == arguments
-    assert actual_routine.body == body
-    assert actual_routine.language == language
-    assert actual_routine.return_type == return_type
-    assert actual_routine.type_ == type_
-    assert actual_routine.description == description
-
-
-def test_from_api_repr(target_class):
-    from google.cloud.bigquery.routine import RoutineArgument
-    from google.cloud.bigquery.routine import RoutineReference
-
-    creation_time = datetime.datetime(
-        2010, 5, 19, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    modified_time = datetime.datetime(
-        2011, 10, 1, 16, 0, 0, tzinfo=google.cloud._helpers.UTC
-    )
-    resource = {
-        "routineReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "routineId": "my_routine",
-        },
-        "etag": "abcdefg",
-        "creationTime": str(google.cloud._helpers._millis(creation_time)),
-        "lastModifiedTime": str(google.cloud._helpers._millis(modified_time)),
-        "definitionBody": "42",
-        "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-        "language": "SQL",
-        "returnType": {"typeKind": "INT64"},
-        "routineType": "SCALAR_FUNCTION",
-        "someNewField": "someValue",
-        "description": "A routine description.",
-    }
-    actual_routine = target_class.from_api_repr(resource)
-
-    assert actual_routine.project == "my-project"
-    assert actual_routine.dataset_id == "my_dataset"
-    assert actual_routine.routine_id == "my_routine"
-    assert (
-        actual_routine.path
-        == "/projects/my-project/datasets/my_dataset/routines/my_routine"
-    )
-    assert actual_routine.reference == RoutineReference.from_string(
-        "my-project.my_dataset.my_routine"
-    )
-    assert actual_routine.etag == "abcdefg"
-    assert actual_routine.created == creation_time
-    assert actual_routine.modified == modified_time
-    assert actual_routine.arguments == [
-        RoutineArgument(
-            name="x",
-            data_type=bigquery_v2.types.StandardSqlDataType(
-                type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64
-            ),
-        )
-    ]
-    assert actual_routine.body == "42"
-    assert actual_routine.language == "SQL"
-    assert actual_routine.return_type == bigquery_v2.types.StandardSqlDataType(
-        type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64
-    )
-    assert actual_routine.type_ == "SCALAR_FUNCTION"
-    assert actual_routine._properties["someNewField"] == "someValue"
-    assert actual_routine.description == "A routine description."
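[The resource dictionaries above lean on BigQuery's REST convention of sending creationTime and lastModifiedTime as millisecond-since-epoch strings, which these tests build via google.cloud._helpers._millis. A small stdlib-only worked example, shown only to make that convention concrete:

import datetime

UTC = datetime.timezone.utc
created = datetime.datetime(2010, 5, 19, 16, 0, 0, tzinfo=UTC)
millis = int(created.timestamp() * 1000)  # what _millis() computes
assert str(millis) == "1274284800000"     # the string form used in the JSON resource
assert datetime.datetime.fromtimestamp(millis / 1000.0, UTC) == created
]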
-
-
-def test_from_api_repr_w_minimal_resource(target_class):
-    from google.cloud.bigquery.routine import RoutineReference
-
-    resource = {
-        "routineReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "routineId": "my_routine",
-        }
-    }
-    actual_routine = target_class.from_api_repr(resource)
-    assert actual_routine.reference == RoutineReference.from_string(
-        "my-project.my_dataset.my_routine"
-    )
-    assert actual_routine.etag is None
-    assert actual_routine.created is None
-    assert actual_routine.modified is None
-    assert actual_routine.arguments == []
-    assert actual_routine.body is None
-    assert actual_routine.language is None
-    assert actual_routine.return_type is None
-    assert actual_routine.type_ is None
-    assert actual_routine.description is None
-
-
-def test_from_api_repr_w_unknown_fields(target_class):
-    from google.cloud.bigquery.routine import RoutineReference
-
-    resource = {
-        "routineReference": {
-            "projectId": "my-project",
-            "datasetId": "my_dataset",
-            "routineId": "my_routine",
-        },
-        "thisFieldIsNotInTheProto": "just ignore me",
-    }
-    actual_routine = target_class.from_api_repr(resource)
-    assert actual_routine.reference == RoutineReference.from_string(
-        "my-project.my_dataset.my_routine"
-    )
-    assert actual_routine._properties is resource
-
-
-@pytest.mark.parametrize(
-    "resource,filter_fields,expected",
-    [
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["arguments"],
-            {"arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}]},
-        ),
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["body"],
-            {"definitionBody": "x * 3"},
-        ),
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["language"],
-            {"language": "SQL"},
-        ),
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["return_type"],
-            {"returnType": {"typeKind": "INT64"}},
-        ),
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["type_"],
-            {"routineType": "SCALAR_FUNCTION"},
-        ),
-        (
-            {
-                "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
-                "definitionBody": "x * 3",
-                "language": "SQL",
-                "returnType": {"typeKind": "INT64"},
-                "routineType": "SCALAR_FUNCTION",
-                "description": "A routine description.",
-            },
-            ["description"],
-            {"description": "A routine description."},
-        ),
-        (
-            {},
-            ["arguments", "language", "body", "type_", "return_type", "description"],
-            {
-                "arguments": None,
-                "definitionBody": None,
-                "language": None,
-                "returnType": None,
-                "routineType": None,
-                "description": None,
-            },
-        ),
-        (
-            {"someNewField": "someValue"},
-            ["someNewField"],
-            {"someNewField": "someValue"},
"someValue"}, - ), - ], -) -def test_build_resource(object_under_test, resource, filter_fields, expected): - object_under_test._properties = resource - actual_routine = object_under_test._build_resource(filter_fields) - assert actual_routine == expected - - -def test_set_arguments_w_none(object_under_test): - object_under_test.arguments = None - assert object_under_test.arguments == [] - assert object_under_test._properties["arguments"] == [] - - -def test_set_imported_libraries(object_under_test): - imported_libraries = ["gs://cloud-samples-data/bigquery/udfs/max-value.js"] - object_under_test.imported_libraries = imported_libraries - assert object_under_test.imported_libraries == imported_libraries - assert object_under_test._properties["importedLibraries"] == imported_libraries - - -def test_set_imported_libraries_w_none(object_under_test): - object_under_test.imported_libraries = None - assert object_under_test.imported_libraries == [] - assert object_under_test._properties["importedLibraries"] == [] - - -def test_set_return_type_w_none(object_under_test): - object_under_test.return_type = None - assert object_under_test.return_type is None - assert object_under_test._properties["returnType"] is None - - -def test_set_description_w_none(object_under_test): - object_under_test.description = None - assert object_under_test.description is None - assert object_under_test._properties["description"] is None - - -def test_repr(target_class): - model = target_class("my-proj.my_dset.my_routine") - actual_routine = repr(model) - assert actual_routine == "Routine('my-proj.my_dset.my_routine')" diff --git a/bigquery/tests/unit/routine/test_routine_argument.py b/bigquery/tests/unit/routine/test_routine_argument.py deleted file mode 100644 index 7d17b5fc703f..000000000000 --- a/bigquery/tests/unit/routine/test_routine_argument.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pytest - -from google.cloud import bigquery_v2 - - -@pytest.fixture -def target_class(): - from google.cloud.bigquery.routine import RoutineArgument - - return RoutineArgument - - -def test_ctor(target_class): - data_type = bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ) - actual_arg = target_class( - name="field_name", kind="FIXED_TYPE", mode="IN", data_type=data_type - ) - assert actual_arg.name == "field_name" - assert actual_arg.kind == "FIXED_TYPE" - assert actual_arg.mode == "IN" - assert actual_arg.data_type == data_type - - -def test_from_api_repr(target_class): - resource = { - "argumentKind": "FIXED_TYPE", - "dataType": {"typeKind": "INT64"}, - "mode": "IN", - "name": "field_name", - } - actual_arg = target_class.from_api_repr(resource) - assert actual_arg.name == "field_name" - assert actual_arg.kind == "FIXED_TYPE" - assert actual_arg.mode == "IN" - assert actual_arg.data_type == bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ) - - -def test_from_api_repr_w_minimal_resource(target_class): - resource = {} - actual_arg = target_class.from_api_repr(resource) - assert actual_arg.name is None - assert actual_arg.kind is None - assert actual_arg.mode is None - assert actual_arg.data_type is None - - -def test_from_api_repr_w_unknown_fields(target_class): - resource = {"thisFieldIsNotInTheProto": "just ignore me"} - actual_arg = target_class.from_api_repr(resource) - assert actual_arg._properties is resource - - -def test_eq(target_class): - data_type = bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ) - arg = target_class( - name="field_name", kind="FIXED_TYPE", mode="IN", data_type=data_type - ) - arg_too = target_class( - name="field_name", kind="FIXED_TYPE", mode="IN", data_type=data_type - ) - assert arg == arg_too - assert not (arg != arg_too) - - other_arg = target_class() - assert not (arg == other_arg) - assert arg != other_arg - - notanarg = object() - assert not (arg == notanarg) - assert arg != notanarg - - -def test_repr(target_class): - arg = target_class(name="field_name", kind="FIXED_TYPE", mode="IN", data_type=None) - actual_repr = repr(arg) - assert actual_repr == ( - "RoutineArgument(data_type=None, kind='FIXED_TYPE', mode='IN', name='field_name')" - ) diff --git a/bigquery/tests/unit/routine/test_routine_reference.py b/bigquery/tests/unit/routine/test_routine_reference.py deleted file mode 100644 index 9d3d551a6294..000000000000 --- a/bigquery/tests/unit/routine/test_routine_reference.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
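[The RoutineArgument tests removed above all revolve around pairing an argument with a bigquery_v2 StandardSqlDataType. A minimal construction sketch, assuming the google-cloud-bigquery 1.x surface this diff removes:

from google.cloud import bigquery_v2
from google.cloud.bigquery.routine import RoutineArgument

int64_type = bigquery_v2.types.StandardSqlDataType(
    type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64
)
arg = RoutineArgument(name="x", kind="FIXED_TYPE", mode="IN", data_type=int64_type)
# Serializes to the camelCase REST shape asserted above:
# {"name": "x", "argumentKind": "FIXED_TYPE", "mode": "IN",
#  "dataType": {"typeKind": "INT64"}}
]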
-
-import pytest
-
-
-@pytest.fixture
-def target_class():
-    from google.cloud.bigquery.routine import RoutineReference
-
-    return RoutineReference
-
-
-def test_from_api_repr(target_class):
-    resource = {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "routineId": "my_routine",
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.project == "my-project"
-    assert got.dataset_id == "my_dataset"
-    assert got.routine_id == "my_routine"
-    assert got.path == "/projects/my-project/datasets/my_dataset/routines/my_routine"
-
-
-def test_from_api_repr_w_unknown_fields(target_class):
-    resource = {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "routineId": "my_routine",
-        "thisFieldIsNotInTheProto": "just ignore me",
-    }
-    got = target_class.from_api_repr(resource)
-    assert got.project == "my-project"
-    assert got.dataset_id == "my_dataset"
-    assert got.routine_id == "my_routine"
-    assert got._properties is resource
-
-
-def test_to_api_repr(target_class):
-    ref = target_class.from_string("my-project.my_dataset.my_routine")
-    got = ref.to_api_repr()
-    assert got == {
-        "projectId": "my-project",
-        "datasetId": "my_dataset",
-        "routineId": "my_routine",
-    }
-
-
-def test_from_string(target_class):
-    got = target_class.from_string("string-project.string_dataset.string_routine")
-    assert got.project == "string-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.routine_id == "string_routine"
-    assert got.path == (
-        "/projects/string-project/datasets/string_dataset/routines/string_routine"
-    )
-
-
-def test_from_string_legacy_string(target_class):
-    with pytest.raises(ValueError):
-        target_class.from_string("string-project:string_dataset.string_routine")
-
-
-def test_from_string_not_fully_qualified(target_class):
-    with pytest.raises(ValueError):
-        target_class.from_string("string_routine")
-
-    with pytest.raises(ValueError):
-        target_class.from_string("string_dataset.string_routine")
-
-    with pytest.raises(ValueError):
-        target_class.from_string("a.b.c.d")
-
-
-def test_from_string_with_default_project(target_class):
-    got = target_class.from_string(
-        "string_dataset.string_routine", default_project="default-project"
-    )
-    assert got.project == "default-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.routine_id == "string_routine"
-
-
-def test_from_string_ignores_default_project(target_class):
-    got = target_class.from_string(
-        "string-project.string_dataset.string_routine",
-        default_project="default-project",
-    )
-    assert got.project == "string-project"
-    assert got.dataset_id == "string_dataset"
-    assert got.routine_id == "string_routine"
-
-
-def test_eq(target_class):
-    routine = target_class.from_string("my-proj.my_dset.my_routine")
-    routine_too = target_class.from_string("my-proj.my_dset.my_routine")
-    assert routine == routine_too
-    assert not (routine != routine_too)
-
-    other_routine = target_class.from_string("my-proj.my_dset.my_routine2")
-    assert not (routine == other_routine)
-    assert routine != other_routine
-
-    notaroutine = object()
-    assert not (routine == notaroutine)
-    assert routine != notaroutine
-
-
-def test_hash(target_class):
-    routine = target_class.from_string("my-proj.my_dset.my_routine")
-    routine2 = target_class.from_string("my-proj.my_dset.routine2")
-    got = {routine: "hello", routine2: "world"}
-    assert got[routine] == "hello"
-    assert got[routine2] == "world"
-
-    routine_too = target_class.from_string("my-proj.my_dset.my_routine")
-    assert got[routine_too] == "hello"
-
-
-def test_repr(target_class):
-    routine = target_class.from_string("my-proj.my_dset.my_routine")
-    got = repr(routine)
-    assert got == "RoutineReference.from_string('my-proj.my_dset.my_routine')"
diff --git a/bigquery/tests/unit/test__helpers.py b/bigquery/tests/unit/test__helpers.py
deleted file mode 100644
index fa6d27c981d8..000000000000
--- a/bigquery/tests/unit/test__helpers.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import datetime
-import decimal
-import unittest
-
-import mock
-
-
-class Test_not_null(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _not_null
-
-        return _not_null(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertFalse(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        self.assertTrue(self._call_fut(None, _Field("REQUIRED")))
-
-    def test_w_value(self):
-        self.assertTrue(self._call_fut(object(), object()))
-
-
-class Test_int_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _int_from_json
-
-        return _int_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("42", object())
-        self.assertEqual(coerced, 42)
-
-    def test_w_float_value(self):
-        coerced = self._call_fut(42, object())
-        self.assertEqual(coerced, 42)
-
-
-class Test_float_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _float_from_json
-
-        return _float_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("3.1415", object())
-        self.assertEqual(coerced, 3.1415)
-
-    def test_w_float_value(self):
-        coerced = self._call_fut(3.1415, object())
-        self.assertEqual(coerced, 3.1415)
-
-
-class Test_decimal_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _decimal_from_json
-
-        return _decimal_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("3.1415", object())
-        self.assertEqual(coerced, decimal.Decimal("3.1415"))
-
-    def test_w_float_value(self):
-        coerced = self._call_fut(3.1415, object())
-        # There is no exact float representation of 3.1415.
-        self.assertEqual(coerced, decimal.Decimal(3.1415))
-
-
-class Test_bool_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _bool_from_json
-
-        return _bool_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(AttributeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_value_t(self):
-        coerced = self._call_fut("T", object())
-        self.assertTrue(coerced)
-
-    def test_w_value_true(self):
-        coerced = self._call_fut("True", object())
-        self.assertTrue(coerced)
-
-    def test_w_value_1(self):
-        coerced = self._call_fut("1", object())
-        self.assertTrue(coerced)
-
-    def test_w_value_other(self):
-        coerced = self._call_fut("f", object())
-        self.assertFalse(coerced)
-
-
-class Test_string_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _string_from_json
-
-        return _string_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        self.assertIsNone(self._call_fut(None, _Field("REQUIRED")))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("Wonderful!", object())
-        self.assertEqual(coerced, "Wonderful!")
-
-
-class Test_bytes_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _bytes_from_json
-
-        return _bytes_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_base64_encoded_bytes(self):
-        expected = b"Wonderful!"
-        encoded = base64.standard_b64encode(expected)
-        coerced = self._call_fut(encoded, object())
-        self.assertEqual(coerced, expected)
-
-    def test_w_base64_encoded_text(self):
-        expected = b"Wonderful!"
-        encoded = base64.standard_b64encode(expected).decode("ascii")
-        coerced = self._call_fut(encoded, object())
-        self.assertEqual(coerced, expected)
-
-
-class Test_timestamp_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _timestamp_from_json
-
-        return _timestamp_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        from google.cloud._helpers import _EPOCH
-
-        coerced = self._call_fut("1.234567", object())
-        self.assertEqual(
-            coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567)
-        )
-
-    def test_w_float_value(self):
-        from google.cloud._helpers import _EPOCH
-
-        coerced = self._call_fut(1.234567, object())
-        self.assertEqual(
-            coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567)
-        )
-
-
-class Test_timestamp_query_param_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery import _helpers
-
-        return _helpers._timestamp_query_param_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_timestamp_valid(self):
-        from google.cloud._helpers import UTC
-
-        samples = [
-            (
-                "2016-12-20 15:58:27.339328+00:00",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20 15:58:27+00:00",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20T15:58:27.339328+00:00",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20T15:58:27+00:00",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20 15:58:27.339328Z",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20 15:58:27Z",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20T15:58:27.339328Z",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC),
-            ),
-            (
-                "2016-12-20T15:58:27Z",
-                datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC),
-            ),
-        ]
-        for timestamp_str, expected_result in samples:
-            self.assertEqual(
-                self._call_fut(timestamp_str, _Field("NULLABLE")), expected_result
-            )
-
-    def test_w_timestamp_invalid(self):
-        with self.assertRaises(ValueError):
-            self._call_fut("definitely-not-a-timestamp", _Field("NULLABLE"))
-
-
-class Test_datetime_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _datetime_from_json
-
-        return _datetime_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("2016-12-02T18:51:33", object())
-        self.assertEqual(coerced, datetime.datetime(2016, 12, 2, 18, 51, 33))
-
-    def test_w_microseconds(self):
-        coerced = self._call_fut("2015-05-22T10:11:12.987654", object())
-        self.assertEqual(coerced, datetime.datetime(2015, 5, 22, 10, 11, 12, 987654))
-
-
-class Test_date_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _date_from_json
-
-        return _date_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("1987-09-22", object())
-        self.assertEqual(coerced, datetime.date(1987, 9, 22))
-
-
-class Test_time_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _time_from_json
-
-        return _time_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_string_value(self):
-        coerced = self._call_fut("12:12:27", object())
-        self.assertEqual(coerced, datetime.time(12, 12, 27))
-
-    def test_w_subsecond_string_value(self):
-        coerced = self._call_fut("12:12:27.123456", object())
-        self.assertEqual(coerced, datetime.time(12, 12, 27, 123456))
-
-    def test_w_bogus_string_value(self):
-        with self.assertRaises(ValueError):
-            self._call_fut("12:12:27.123", object())
-
-
-class Test_record_from_json(unittest.TestCase):
-    def _call_fut(self, value, field):
-        from google.cloud.bigquery._helpers import _record_from_json
-
-        return _record_from_json(value, field)
-
-    def test_w_none_nullable(self):
-        self.assertIsNone(self._call_fut(None, _Field("NULLABLE")))
-
-    def test_w_none_required(self):
-        with self.assertRaises(TypeError):
-            self._call_fut(None, _Field("REQUIRED"))
-
-    def test_w_nullable_subfield_none(self):
-        subfield = _Field("NULLABLE", "age", "INTEGER")
-        field = _Field("REQUIRED", fields=[subfield])
-        value = {"f": [{"v": None}]}
-        coerced = self._call_fut(value, field)
-        self.assertEqual(coerced, {"age": None})
-
-    def test_w_scalar_subfield(self):
-        subfield = _Field("REQUIRED", "age", "INTEGER")
-        field = _Field("REQUIRED", fields=[subfield])
-        value = {"f": [{"v": 42}]}
-        coerced = self._call_fut(value, field)
-        self.assertEqual(coerced, {"age": 42})
-
-    def test_w_scalar_subfield_geography(self):
-        subfield = _Field("REQUIRED", "geo", "GEOGRAPHY")
-        field = _Field("REQUIRED", fields=[subfield])
-        value = {"f": [{"v": "POINT(1, 2)"}]}
-        coerced = self._call_fut(value, field)
-        self.assertEqual(coerced, {"geo": "POINT(1, 2)"})
-
-    def test_w_repeated_subfield(self):
-        subfield = _Field("REPEATED", "color", "STRING")
-        field = _Field("REQUIRED", fields=[subfield])
-        value = {"f": [{"v": [{"v": "red"}, {"v": "yellow"}, {"v": "blue"}]}]}
-        coerced = self._call_fut(value, field)
-        self.assertEqual(coerced, {"color": ["red", "yellow", "blue"]})
-
-    def test_w_record_subfield(self):
-        full_name = _Field("REQUIRED", "full_name", "STRING")
-        area_code = _Field("REQUIRED", "area_code", "STRING")
-        local_number = _Field("REQUIRED", "local_number", "STRING")
-        rank = _Field("REQUIRED", "rank", "INTEGER")
-        phone = _Field(
-            "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank]
-        )
-        person = _Field("REQUIRED", "person", "RECORD", fields=[full_name, phone])
-        value = {
-            "f": [
-                {"v": "Phred Phlyntstone"},
-                {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}},
-            ]
-        }
-        expected = {
-            "full_name": "Phred Phlyntstone",
-            "phone": {"area_code": "800", "local_number": "555-1212", "rank": 1},
-        }
-        coerced = self._call_fut(value, person)
-        self.assertEqual(coerced, expected)
-
-
-class Test_field_to_index_mapping(unittest.TestCase):
-    def _call_fut(self, schema):
-        from google.cloud.bigquery._helpers import _field_to_index_mapping
-
-        return _field_to_index_mapping(schema)
-
-    def test_w_empty_schema(self):
-        self.assertEqual(self._call_fut([]), {})
-
-    def test_w_non_empty_schema(self):
-        schema = [
-            _Field("REPEATED", "first", "INTEGER"),
-            _Field("REQUIRED", "second", "INTEGER"),
-            _Field("REPEATED", "third", "INTEGER"),
-        ]
-        self.assertEqual(self._call_fut(schema), {"first": 0, "second": 1, "third": 2})
-
-
-class Test_row_tuple_from_json(unittest.TestCase):
-    def _call_fut(self, row, schema):
-        from google.cloud.bigquery._helpers import _row_tuple_from_json
-
-        with _field_isinstance_patcher():
-            return _row_tuple_from_json(row, schema)
-
-    def test_w_single_scalar_column(self):
-        # SELECT 1 AS col
-        col = _Field("REQUIRED", "col", "INTEGER")
-        row = {u"f": [{u"v": u"1"}]}
-        self.assertEqual(self._call_fut(row, schema=[col]), (1,))
-
-    def test_w_single_scalar_geography_column(self):
-        # SELECT a GEOGRAPHY value AS geo
-        col = _Field("REQUIRED", "geo", "GEOGRAPHY")
-        row = {u"f": [{u"v": u"POINT(1, 2)"}]}
-        self.assertEqual(self._call_fut(row, schema=[col]), ("POINT(1, 2)",))
-
-    def test_w_single_struct_column(self):
-        # SELECT (1, 2) AS col
-        sub_1 = _Field("REQUIRED", "sub_1", "INTEGER")
-        sub_2 = _Field("REQUIRED", "sub_2", "INTEGER")
-        col = _Field("REQUIRED", "col", "RECORD", fields=[sub_1, sub_2])
-        row = {u"f": [{u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}]}}]}
-        self.assertEqual(self._call_fut(row, schema=[col]), ({"sub_1": 1, "sub_2": 2},))
-
-    def test_w_single_array_column(self):
-        # SELECT [1, 2, 3] as col
-        col = _Field("REPEATED", "col", "INTEGER")
-        row = {u"f": [{u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}]}
-        self.assertEqual(self._call_fut(row, schema=[col]), ([1, 2, 3],))
-
-    def test_w_struct_w_nested_array_column(self):
-        # SELECT ([1, 2], 3, [4, 5]) as col
-        first = _Field("REPEATED", "first", "INTEGER")
-        second = _Field("REQUIRED", "second", "INTEGER")
-        third = _Field("REPEATED", "third", "INTEGER")
-        col = _Field("REQUIRED", "col", "RECORD", fields=[first, second, third])
-        row = {
-            u"f": [
-                {
-                    u"v": {
-                        u"f": [
-                            {u"v": [{u"v": u"1"}, {u"v": u"2"}]},
-                            {u"v": u"3"},
-                            {u"v": [{u"v": u"4"}, {u"v": u"5"}]},
-                        ]
-                    }
-                }
-            ]
-        }
-        self.assertEqual(
-            self._call_fut(row, schema=[col]),
-            ({u"first": [1, 2], u"second": 3, u"third": [4, 5]},),
-        )
-
-    def test_w_array_of_struct(self):
-        # SELECT [(1, 2, 3), (4, 5, 6)] as col
-        first = _Field("REQUIRED", "first", "INTEGER")
-        second = _Field("REQUIRED", "second", "INTEGER")
-        third = _Field("REQUIRED", "third", "INTEGER")
-        col = _Field("REPEATED", "col", "RECORD", fields=[first, second, third])
-        row = {
-            u"f": [
-                {
-                    u"v": [
-                        {u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}},
-                        {u"v": {u"f": [{u"v": u"4"}, {u"v": u"5"}, {u"v": u"6"}]}},
-                    ]
-                }
-            ]
-        }
-        self.assertEqual(
-            self._call_fut(row, schema=[col]),
-            (
-                [
-                    {u"first": 1, u"second": 2, u"third": 3},
-                    {u"first": 4, u"second": 5, u"third": 6},
-                ],
-            ),
-        )
-
-    def test_w_array_of_struct_w_array(self):
-        # SELECT [([1, 2, 3], 4), ([5, 6], 7)]
-        first = _Field("REPEATED", "first", "INTEGER")
-        second = _Field("REQUIRED", "second", "INTEGER")
-        col = _Field("REPEATED", "col", "RECORD", fields=[first, second])
-        row = {
-            u"f": [
-                {
-                    u"v": [
-                        {
-                            u"v": {
-                                u"f": [
-                                    {u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]},
-                                    {u"v": u"4"},
-                                ]
-                            }
-                        },
-                        {
-                            u"v": {
-                                u"f": [
-                                    {u"v": [{u"v": u"5"}, {u"v": u"6"}]},
-                                    {u"v": u"7"},
-                                ]
-                            }
-                        },
-                    ]
-                }
-            ]
-        }
-        self.assertEqual(
-            self._call_fut(row, schema=[col]),
-            ([{u"first": [1, 2, 3], u"second": 4}, {u"first": [5, 6], u"second": 7}],),
-        )
{u"first": [5, 6], u"second": 7}],), - ) - - -class Test_rows_from_json(unittest.TestCase): - def _call_fut(self, rows, schema): - from google.cloud.bigquery._helpers import _rows_from_json - - with _field_isinstance_patcher(): - return _rows_from_json(rows, schema) - - def test_w_record_subfield(self): - from google.cloud.bigquery.table import Row - - full_name = _Field("REQUIRED", "full_name", "STRING") - area_code = _Field("REQUIRED", "area_code", "STRING") - local_number = _Field("REQUIRED", "local_number", "STRING") - rank = _Field("REQUIRED", "rank", "INTEGER") - phone = _Field( - "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank] - ) - color = _Field("REPEATED", "color", "STRING") - schema = [full_name, phone, color] - rows = [ - { - "f": [ - {"v": "Phred Phlyntstone"}, - {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, - {"v": [{"v": "orange"}, {"v": "black"}]}, - ] - }, - { - "f": [ - {"v": "Bharney Rhubble"}, - {"v": {"f": [{"v": "877"}, {"v": "768-5309"}, {"v": 2}]}}, - {"v": [{"v": "brown"}]}, - ] - }, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}, {"v": []}]}, - ] - phred_phone = {"area_code": "800", "local_number": "555-1212", "rank": 1} - bharney_phone = {"area_code": "877", "local_number": "768-5309", "rank": 2} - f2i = {"full_name": 0, "phone": 1, "color": 2} - expected = [ - Row(("Phred Phlyntstone", phred_phone, ["orange", "black"]), f2i), - Row(("Bharney Rhubble", bharney_phone, ["brown"]), f2i), - Row(("Wylma Phlyntstone", None, []), f2i), - ] - coerced = self._call_fut(rows, schema) - self.assertEqual(coerced, expected) - - def test_w_int64_float64_bool(self): - from google.cloud.bigquery.table import Row - - # "Standard" SQL dialect uses 'INT64', 'FLOAT64', 'BOOL'. - candidate = _Field("REQUIRED", "candidate", "STRING") - votes = _Field("REQUIRED", "votes", "INT64") - percentage = _Field("REQUIRED", "percentage", "FLOAT64") - incumbent = _Field("REQUIRED", "incumbent", "BOOL") - schema = [candidate, votes, percentage, incumbent] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": 8}, {"v": 0.25}, {"v": "true"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": 4}, {"v": 0.125}, {"v": "false"}]}, - { - "f": [ - {"v": "Wylma Phlyntstone"}, - {"v": 20}, - {"v": 0.625}, - {"v": "false"}, - ] - }, - ] - f2i = {"candidate": 0, "votes": 1, "percentage": 2, "incumbent": 3} - expected = [ - Row(("Phred Phlyntstone", 8, 0.25, True), f2i), - Row(("Bharney Rhubble", 4, 0.125, False), f2i), - Row(("Wylma Phlyntstone", 20, 0.625, False), f2i), - ] - coerced = self._call_fut(rows, schema) - self.assertEqual(coerced, expected) - - -class Test_int_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _int_to_json - - return _int_to_json(value) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), "123") - - def test_w_string(self): - self.assertEqual(self._call_fut("123"), "123") - - -class Test_float_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _float_to_json - - return _float_to_json(value) - - def test_w_float(self): - self.assertEqual(self._call_fut(1.23), 1.23) - - -class Test_decimal_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _decimal_to_json - - return _decimal_to_json(value) - - def test_w_float(self): - self.assertEqual(self._call_fut(1.23), 1.23) - - def test_w_string(self): - self.assertEqual(self._call_fut("1.23"), "1.23") - - def test_w_decimal(self): - 
self.assertEqual(self._call_fut(decimal.Decimal("1.23")), "1.23") - - -class Test_bool_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _bool_to_json - - return _bool_to_json(value) - - def test_w_true(self): - self.assertEqual(self._call_fut(True), "true") - - def test_w_false(self): - self.assertEqual(self._call_fut(False), "false") - - def test_w_string(self): - self.assertEqual(self._call_fut("false"), "false") - - -class Test_bytes_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _bytes_to_json - - return _bytes_to_json(value) - - def test_w_non_bytes(self): - non_bytes = object() - self.assertIs(self._call_fut(non_bytes), non_bytes) - - def test_w_bytes(self): - source = b"source" - expected = u"c291cmNl" - converted = self._call_fut(source) - self.assertEqual(converted, expected) - - -class Test_timestamp_to_json_parameter(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _timestamp_to_json_parameter - - return _timestamp_to_json_parameter(value) - - def test_w_float(self): - self.assertEqual(self._call_fut(1.234567), 1.234567) - - def test_w_string(self): - ZULU = "2016-12-20 15:58:27.339328+00:00" - self.assertEqual(self._call_fut(ZULU), ZULU) - - def test_w_datetime_wo_zone(self): - ZULU = "2016-12-20 15:58:27.339328+00:00" - when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) - self.assertEqual(self._call_fut(when), ZULU) - - def test_w_datetime_w_non_utc_zone(self): - class _Zone(datetime.tzinfo): - def utcoffset(self, _): - return datetime.timedelta(minutes=-240) - - ZULU = "2016-12-20 19:58:27.339328+00:00" - when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=_Zone()) - self.assertEqual(self._call_fut(when), ZULU) - - def test_w_datetime_w_utc_zone(self): - from google.cloud._helpers import UTC - - ZULU = "2016-12-20 15:58:27.339328+00:00" - when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) - self.assertEqual(self._call_fut(when), ZULU) - - -class Test_timestamp_to_json_row(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _timestamp_to_json_row - - return _timestamp_to_json_row(value) - - def test_w_float(self): - self.assertEqual(self._call_fut(1.234567), 1.234567) - - def test_w_string(self): - ZULU = "2016-12-20 15:58:27.339328+00:00" - self.assertEqual(self._call_fut(ZULU), ZULU) - - def test_w_datetime(self): - from google.cloud._helpers import _microseconds_from_datetime - - when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) - self.assertEqual(self._call_fut(when), _microseconds_from_datetime(when) / 1e6) - - -class Test_datetime_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _datetime_to_json - - return _datetime_to_json(value) - - def test_w_string(self): - RFC3339 = "2016-12-03T14:14:51Z" - self.assertEqual(self._call_fut(RFC3339), RFC3339) - - def test_w_datetime(self): - from google.cloud._helpers import UTC - - when = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456, tzinfo=UTC) - self.assertEqual(self._call_fut(when), "2016-12-03T14:11:27.123456") - - -class Test_date_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _date_to_json - - return _date_to_json(value) - - def test_w_string(self): - RFC3339 = "2016-12-03" - self.assertEqual(self._call_fut(RFC3339), RFC3339) - - def test_w_datetime(self): - when = 
datetime.date(2016, 12, 3) - self.assertEqual(self._call_fut(when), "2016-12-03") - - -class Test_time_to_json(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _time_to_json - - return _time_to_json(value) - - def test_w_string(self): - RFC3339 = "12:13:41" - self.assertEqual(self._call_fut(RFC3339), RFC3339) - - def test_w_datetime(self): - when = datetime.time(12, 13, 41) - self.assertEqual(self._call_fut(when), "12:13:41") - - -def _make_field(field_type, mode="NULLABLE", name="testing", fields=()): - from google.cloud.bigquery.schema import SchemaField - - return SchemaField(name=name, field_type=field_type, mode=mode, fields=fields) - - -class Test_scalar_field_to_json(unittest.TestCase): - def _call_fut(self, field, value): - from google.cloud.bigquery._helpers import _scalar_field_to_json - - return _scalar_field_to_json(field, value) - - def test_w_unknown_field_type(self): - field = _make_field("UNKNOWN") - original = object() - converted = self._call_fut(field, original) - self.assertIs(converted, original) - - def test_w_known_field_type(self): - field = _make_field("INT64") - original = 42 - converted = self._call_fut(field, original) - self.assertEqual(converted, str(original)) - - -class Test_repeated_field_to_json(unittest.TestCase): - def _call_fut(self, field, value): - from google.cloud.bigquery._helpers import _repeated_field_to_json - - return _repeated_field_to_json(field, value) - - def test_w_empty(self): - field = _make_field("INT64", mode="REPEATED") - original = [] - converted = self._call_fut(field, original) - self.assertEqual(converted, original) - self.assertEqual(field.mode, "REPEATED") - - def test_w_non_empty(self): - field = _make_field("INT64", mode="REPEATED") - original = [42] - converted = self._call_fut(field, original) - self.assertEqual(converted, [str(value) for value in original]) - self.assertEqual(field.mode, "REPEATED") - - -class Test_record_field_to_json(unittest.TestCase): - def _call_fut(self, field, value): - from google.cloud.bigquery._helpers import _record_field_to_json - - return _record_field_to_json(field, value) - - def test_w_empty(self): - fields = [] - original = [] - converted = self._call_fut(fields, original) - self.assertEqual(converted, {}) - - def test_w_non_empty_list(self): - fields = [ - _make_field("INT64", name="one", mode="NULLABLE"), - _make_field("STRING", name="two", mode="NULLABLE"), - ] - original = [42, "two"] - converted = self._call_fut(fields, original) - self.assertEqual(converted, {"one": "42", "two": "two"}) - - def test_w_non_empty_dict(self): - fields = [ - _make_field("INT64", name="one", mode="NULLABLE"), - _make_field("STRING", name="two", mode="NULLABLE"), - ] - original = {"one": 42, "two": "two"} - converted = self._call_fut(fields, original) - self.assertEqual(converted, {"one": "42", "two": "two"}) - - def test_w_some_missing_nullables(self): - fields = [ - _make_field("INT64", name="one", mode="NULLABLE"), - _make_field("STRING", name="two", mode="NULLABLE"), - ] - original = {"one": 42} - converted = self._call_fut(fields, original) - - # missing fields should not be converted to an explicit None - self.assertEqual(converted, {"one": "42"}) - - def test_w_all_missing_nullables(self): - fields = [ - _make_field("INT64", name="one", mode="NULLABLE"), - _make_field("STRING", name="two", mode="NULLABLE"), - ] - original = {} - converted = self._call_fut(fields, original) - - # we should get an empty dict, not None - self.assertEqual(converted, {}) - - 
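# The tests above pin down the RECORD-serialization contract: values may arrive
# as a dict keyed by subfield name or as a sequence in schema order, and missing
# or None-valued NULLABLE subfields are dropped rather than emitted as explicit
# nulls. A minimal illustration of that contract (not the library's
# implementation; it assumes SchemaField-like objects with a .name attribute
# and scalars that serialize via str()):
def record_to_json_sketch(fields, value):
    if not isinstance(value, dict):
        # Positional values are matched to subfields by schema order.
        value = {field.name: item for field, item in zip(fields, value)}
    return {
        field.name: str(value[field.name])
        for field in fields
        if value.get(field.name) is not None
    }

# record_to_json_sketch(fields, {"one": 42}) == {"one": "42"}; "two" is omitted.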
def test_w_explicit_none_value(self): - fields = [ - _make_field("INT64", name="one", mode="NULLABLE"), - _make_field("STRING", name="two", mode="NULLABLE"), - _make_field("BOOL", name="three", mode="REPEATED"), - ] - original = {"three": None, "one": 42, "two": None} - converted = self._call_fut(fields, original) - - # None values should be dropped regardless of the field type - self.assertEqual(converted, {"one": "42"}) - - -class Test_field_to_json(unittest.TestCase): - def _call_fut(self, field, value): - from google.cloud.bigquery._helpers import _field_to_json - - return _field_to_json(field, value) - - def test_w_none(self): - field = _make_field("INT64") - original = None - converted = self._call_fut(field, original) - self.assertIsNone(converted) - - def test_w_repeated(self): - field = _make_field("INT64", mode="REPEATED") - original = [42, 17] - converted = self._call_fut(field, original) - self.assertEqual(converted, [str(value) for value in original]) - - def test_w_record(self): - subfields = [ - _make_field("INT64", name="one"), - _make_field("STRING", name="two"), - ] - field = _make_field("RECORD", fields=subfields) - original = {"one": 42, "two": "two"} - converted = self._call_fut(field, original) - self.assertEqual(converted, {"one": "42", "two": "two"}) - - def test_w_scalar(self): - field = _make_field("INT64") - original = 42 - converted = self._call_fut(field, original) - self.assertEqual(converted, str(original)) - - -class Test_snake_to_camel_case(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _snake_to_camel_case - - return _snake_to_camel_case(value) - - def test_w_snake_case_string(self): - self.assertEqual(self._call_fut("friendly_name"), "friendlyName") - - def test_w_camel_case_string(self): - self.assertEqual(self._call_fut("friendlyName"), "friendlyName") - - -class Test__get_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys, **kw): - from google.cloud.bigquery._helpers import _get_sub_prop - - return _get_sub_prop(container, keys, **kw) - - def test_w_empty_container_default_default(self): - self.assertIsNone(self._call_fut({}, ["key1"])) - - def test_w_missing_key_explicit_default(self): - self.assertEqual(self._call_fut({"key2": 2}, ["key1"], default=1), 1) - - def test_w_matching_single_key(self): - self.assertEqual(self._call_fut({"key1": 1}, ["key1"]), 1) - - def test_w_matching_first_key_missing_second_key(self): - self.assertIsNone(self._call_fut({"key1": {"key3": 3}}, ["key1", "key2"])) - - def test_w_matching_first_key_matching_second_key(self): - self.assertEqual(self._call_fut({"key1": {"key2": 2}}, ["key1", "key2"]), 2) - - -class Test__set_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys, value): - from google.cloud.bigquery._helpers import _set_sub_prop - - return _set_sub_prop(container, keys, value) - - def test_w_empty_container_single_key(self): - container = {} - self._call_fut(container, ["key1"], "value") - self.assertEqual(container, {"key1": "value"}) - - def test_w_empty_container_nested_keys(self): - container = {} - self._call_fut(container, ["key1", "key2", "key3"], "value") - self.assertEqual(container, {"key1": {"key2": {"key3": "value"}}}) - - def test_w_existing_value(self): - container = {"key1": "before"} - self._call_fut(container, ["key1"], "after") - self.assertEqual(container, {"key1": "after"}) - - def test_w_nested_keys_existing_value(self): - container = {"key1": {"key2": {"key3": "before"}}} - self._call_fut(container, ["key1", 
"key2", "key3"], "after") - self.assertEqual(container, {"key1": {"key2": {"key3": "after"}}}) - - -class Test__del_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys): - from google.cloud.bigquery._helpers import _del_sub_prop - - return _del_sub_prop(container, keys) - - def test_w_single_key(self): - container = {"key1": "value"} - self._call_fut(container, ["key1"]) - self.assertEqual(container, {}) - - def test_w_empty_container_nested_keys(self): - container = {} - self._call_fut(container, ["key1", "key2", "key3"]) - self.assertEqual(container, {"key1": {"key2": {}}}) - - def test_w_existing_value_nested_keys(self): - container = {"key1": {"key2": {"key3": "value"}}} - self._call_fut(container, ["key1", "key2", "key3"]) - self.assertEqual(container, {"key1": {"key2": {}}}) - - -class Test__int_or_none(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _int_or_none - - return _int_or_none(value) - - def test_w_num_string(self): - self.assertEqual(self._call_fut("123"), 123) - - def test_w_none(self): - self.assertIsNone(self._call_fut(None)) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), 123) - - def test_w_non_num_string(self): - with self.assertRaises(ValueError): - self._call_fut("ham") - - -class Test__str_or_none(unittest.TestCase): - def _call_fut(self, value): - from google.cloud.bigquery._helpers import _str_or_none - - return _str_or_none(value) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), "123") - - def test_w_none(self): - self.assertIsNone(self._call_fut(None)) - - def test_w_str(self): - self.assertEqual(self._call_fut("ham"), "ham") - - -class _Field(object): - def __init__(self, mode, name="unknown", field_type="UNKNOWN", fields=()): - self.mode = mode - self.name = name - self.field_type = field_type - self.fields = fields - - -def _field_isinstance_patcher(): - """A patcher thank makes _Field instances seem like SchemaField instances. - """ - from google.cloud.bigquery.schema import SchemaField - - def fake_isinstance(instance, target_class): - if instance.__class__.__name__ != "_Field": - return isinstance(instance, target_class) # pragma: NO COVER - - # pretend that _Field() instances are actually instances of SchemaField - return target_class is SchemaField or ( - isinstance(target_class, tuple) and SchemaField in target_class - ) - - patcher = mock.patch( - "google.cloud.bigquery.schema.isinstance", side_effect=fake_isinstance - ) - return patcher diff --git a/bigquery/tests/unit/test__http.py b/bigquery/tests/unit/test__http.py deleted file mode 100644 index 4da805d48c78..000000000000 --- a/bigquery/tests/unit/test__http.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock -import requests - - -class TestConnection(unittest.TestCase): - @staticmethod - def _get_default_timeout(): - from google.cloud.bigquery._http import _http - - return _http._DEFAULT_TIMEOUT - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery._http import Connection - - return Connection - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_build_api_url_no_extra_query_params(self): - conn = self._make_one(object()) - URI = "/".join([conn.DEFAULT_API_ENDPOINT, "bigquery", conn.API_VERSION, "foo"]) - self.assertEqual(conn.build_api_url("/foo"), URI) - - def test_build_api_url_w_custom_endpoint(self): - custom_endpoint = "https://www.foo-googleapis.com" - conn = self._make_one(object(), api_endpoint=custom_endpoint) - URI = "/".join([custom_endpoint, "bigquery", conn.API_VERSION, "foo"]) - self.assertEqual(conn.build_api_url("/foo"), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - conn = self._make_one(object()) - uri = conn.build_api_url("/foo", {"bar": "baz"}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, "/".join(["", "bigquery", conn.API_VERSION, "foo"])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms["bar"], "baz") - - def test_user_agent(self): - from google.cloud import _http as base_http - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b"brent-spiner" - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=["_http"]) - - conn = self._make_one(client) - conn.user_agent = "my-application/1.2.3" - req_data = "req-data-boring" - result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - "Accept-Encoding": "gzip", - base_http.CLIENT_INFO_HEADER: conn.user_agent, - "User-Agent": conn.user_agent, - } - expected_uri = conn.build_api_url("/rainbow") - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method="GET", - url=expected_uri, - timeout=self._get_default_timeout(), - ) - self.assertIn("my-application/1.2.3", conn.user_agent) - - def test_extra_headers_replace(self): - from google.cloud import _http as base_http - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b"brent-spiner" - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=["_http"]) - - conn = self._make_one(client) - conn.extra_headers = {"x-test-header": "a test value"} - req_data = "req-data-boring" - result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - "Accept-Encoding": "gzip", - base_http.CLIENT_INFO_HEADER: conn.user_agent, - "User-Agent": conn.user_agent, - "x-test-header": "a test value", - } - expected_uri = conn.build_api_url("/rainbow") - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method="GET", - url=expected_uri, - timeout=self._get_default_timeout(), - ) diff --git a/bigquery/tests/unit/test__pandas_helpers.py b/bigquery/tests/unit/test__pandas_helpers.py deleted file mode 100644 index 
6adf098c03c8..000000000000 --- a/bigquery/tests/unit/test__pandas_helpers.py +++ /dev/null @@ -1,1326 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import datetime -import decimal -import functools -import operator -import warnings - -import mock - -try: - import pandas - import pandas.api.types - import pandas.testing -except ImportError: # pragma: NO COVER - pandas = None -try: - import pyarrow - import pyarrow.types -except ImportError: # pragma: NO COVER - # Mock out pyarrow when missing, because methods from pyarrow.types are - # used in test parameterization. - pyarrow = mock.Mock() -import pytest -import pytz - -from google import api_core -from google.cloud.bigquery import schema - - -@pytest.fixture -def module_under_test(): - from google.cloud.bigquery import _pandas_helpers - - return _pandas_helpers - - -def is_none(value): - return value is None - - -def is_datetime(type_): - # See: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#datetime-type - return all_( - pyarrow.types.is_timestamp, - lambda type_: type_.unit == "us", - lambda type_: type_.tz is None, - )(type_) - - -def is_numeric(type_): - # See: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#numeric-type - return all_( - pyarrow.types.is_decimal, - lambda type_: type_.precision == 38, - lambda type_: type_.scale == 9, - )(type_) - - -def is_timestamp(type_): - # See: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#timestamp-type - return all_( - pyarrow.types.is_timestamp, - lambda type_: type_.unit == "us", - lambda type_: type_.tz == "UTC", - )(type_) - - -def do_all(functions, value): - return all((func(value) for func in functions)) - - -def all_(*functions): - return functools.partial(do_all, functions) - - -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_is_datetime(): - assert is_datetime(pyarrow.timestamp("us", tz=None)) - assert not is_datetime(pyarrow.timestamp("ms", tz=None)) - assert not is_datetime(pyarrow.timestamp("us", tz="UTC")) - assert not is_datetime(pyarrow.timestamp("ns", tz="UTC")) - assert not is_datetime(pyarrow.string()) - - -def test_do_all(): - assert do_all((lambda _: True, lambda _: True), None) - assert not do_all((lambda _: True, lambda _: False), None) - assert not do_all((lambda _: False,), None) - - -def test_all_(): - assert all_(lambda _: True, lambda _: True)(None) - assert not all_(lambda _: True, lambda _: False)(None) - - -@pytest.mark.parametrize( - "bq_type,bq_mode,is_correct_type", - [ - ("STRING", "NULLABLE", pyarrow.types.is_string), - ("STRING", None, pyarrow.types.is_string), - ("string", "NULLABLE", pyarrow.types.is_string), - ("StRiNg", "NULLABLE", pyarrow.types.is_string), - ("BYTES", "NULLABLE", pyarrow.types.is_binary), - ("INTEGER", "NULLABLE", pyarrow.types.is_int64), - ("INT64", "NULLABLE", pyarrow.types.is_int64), - ("FLOAT", "NULLABLE", pyarrow.types.is_float64), - ("FLOAT64", "NULLABLE", 
pyarrow.types.is_float64), - ("NUMERIC", "NULLABLE", is_numeric), - ("BOOLEAN", "NULLABLE", pyarrow.types.is_boolean), - ("BOOL", "NULLABLE", pyarrow.types.is_boolean), - ("TIMESTAMP", "NULLABLE", is_timestamp), - ("DATE", "NULLABLE", pyarrow.types.is_date32), - ("TIME", "NULLABLE", pyarrow.types.is_time64), - ("DATETIME", "NULLABLE", is_datetime), - ("GEOGRAPHY", "NULLABLE", pyarrow.types.is_string), - ("UNKNOWN_TYPE", "NULLABLE", is_none), - # Use pyarrow.list_(item_type) for repeated (array) fields. - ( - "STRING", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_string(type_.value_type), - ), - ), - ( - "STRING", - "repeated", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_string(type_.value_type), - ), - ), - ( - "STRING", - "RePeAtEd", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_string(type_.value_type), - ), - ), - ( - "BYTES", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_binary(type_.value_type), - ), - ), - ( - "INTEGER", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_int64(type_.value_type), - ), - ), - ( - "INT64", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_int64(type_.value_type), - ), - ), - ( - "FLOAT", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_float64(type_.value_type), - ), - ), - ( - "FLOAT64", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_float64(type_.value_type), - ), - ), - ( - "NUMERIC", - "REPEATED", - all_(pyarrow.types.is_list, lambda type_: is_numeric(type_.value_type)), - ), - ( - "BOOLEAN", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_boolean(type_.value_type), - ), - ), - ( - "BOOL", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_boolean(type_.value_type), - ), - ), - ( - "TIMESTAMP", - "REPEATED", - all_(pyarrow.types.is_list, lambda type_: is_timestamp(type_.value_type)), - ), - ( - "DATE", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_date32(type_.value_type), - ), - ), - ( - "TIME", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_time64(type_.value_type), - ), - ), - ( - "DATETIME", - "REPEATED", - all_(pyarrow.types.is_list, lambda type_: is_datetime(type_.value_type)), - ), - ( - "GEOGRAPHY", - "REPEATED", - all_( - pyarrow.types.is_list, - lambda type_: pyarrow.types.is_string(type_.value_type), - ), - ), - ("RECORD", "REPEATED", is_none), - ("UNKNOWN_TYPE", "REPEATED", is_none), - ], -) -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_data_type(module_under_test, bq_type, bq_mode, is_correct_type): - field = schema.SchemaField("ignored_name", bq_type, mode=bq_mode) - actual = module_under_test.bq_to_arrow_data_type(field) - assert is_correct_type(actual) - - -@pytest.mark.parametrize("bq_type", ["RECORD", "record", "STRUCT", "struct"]) -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_data_type_w_struct(module_under_test, bq_type): - fields = ( - schema.SchemaField("field01", "STRING"), - schema.SchemaField("field02", "BYTES"), - schema.SchemaField("field03", "INTEGER"), - schema.SchemaField("field04", "INT64"), - schema.SchemaField("field05", "FLOAT"), - schema.SchemaField("field06", "FLOAT64"), - schema.SchemaField("field07", "NUMERIC"), - schema.SchemaField("field08", 
"BOOLEAN"), - schema.SchemaField("field09", "BOOL"), - schema.SchemaField("field10", "TIMESTAMP"), - schema.SchemaField("field11", "DATE"), - schema.SchemaField("field12", "TIME"), - schema.SchemaField("field13", "DATETIME"), - schema.SchemaField("field14", "GEOGRAPHY"), - ) - field = schema.SchemaField("ignored_name", bq_type, mode="NULLABLE", fields=fields) - actual = module_under_test.bq_to_arrow_data_type(field) - expected = pyarrow.struct( - ( - pyarrow.field("field01", pyarrow.string()), - pyarrow.field("field02", pyarrow.binary()), - pyarrow.field("field03", pyarrow.int64()), - pyarrow.field("field04", pyarrow.int64()), - pyarrow.field("field05", pyarrow.float64()), - pyarrow.field("field06", pyarrow.float64()), - pyarrow.field("field07", module_under_test.pyarrow_numeric()), - pyarrow.field("field08", pyarrow.bool_()), - pyarrow.field("field09", pyarrow.bool_()), - pyarrow.field("field10", module_under_test.pyarrow_timestamp()), - pyarrow.field("field11", pyarrow.date32()), - pyarrow.field("field12", module_under_test.pyarrow_time()), - pyarrow.field("field13", module_under_test.pyarrow_datetime()), - pyarrow.field("field14", pyarrow.string()), - ) - ) - assert pyarrow.types.is_struct(actual) - assert actual.num_children == len(fields) - assert actual.equals(expected) - - -@pytest.mark.parametrize("bq_type", ["RECORD", "record", "STRUCT", "struct"]) -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_data_type_w_array_struct(module_under_test, bq_type): - fields = ( - schema.SchemaField("field01", "STRING"), - schema.SchemaField("field02", "BYTES"), - schema.SchemaField("field03", "INTEGER"), - schema.SchemaField("field04", "INT64"), - schema.SchemaField("field05", "FLOAT"), - schema.SchemaField("field06", "FLOAT64"), - schema.SchemaField("field07", "NUMERIC"), - schema.SchemaField("field08", "BOOLEAN"), - schema.SchemaField("field09", "BOOL"), - schema.SchemaField("field10", "TIMESTAMP"), - schema.SchemaField("field11", "DATE"), - schema.SchemaField("field12", "TIME"), - schema.SchemaField("field13", "DATETIME"), - schema.SchemaField("field14", "GEOGRAPHY"), - ) - field = schema.SchemaField("ignored_name", bq_type, mode="REPEATED", fields=fields) - actual = module_under_test.bq_to_arrow_data_type(field) - expected_value_type = pyarrow.struct( - ( - pyarrow.field("field01", pyarrow.string()), - pyarrow.field("field02", pyarrow.binary()), - pyarrow.field("field03", pyarrow.int64()), - pyarrow.field("field04", pyarrow.int64()), - pyarrow.field("field05", pyarrow.float64()), - pyarrow.field("field06", pyarrow.float64()), - pyarrow.field("field07", module_under_test.pyarrow_numeric()), - pyarrow.field("field08", pyarrow.bool_()), - pyarrow.field("field09", pyarrow.bool_()), - pyarrow.field("field10", module_under_test.pyarrow_timestamp()), - pyarrow.field("field11", pyarrow.date32()), - pyarrow.field("field12", module_under_test.pyarrow_time()), - pyarrow.field("field13", module_under_test.pyarrow_datetime()), - pyarrow.field("field14", pyarrow.string()), - ) - ) - assert pyarrow.types.is_list(actual) - assert pyarrow.types.is_struct(actual.value_type) - assert actual.value_type.num_children == len(fields) - assert actual.value_type.equals(expected_value_type) - - -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_data_type_w_struct_unknown_subfield(module_under_test): - fields = ( - schema.SchemaField("field1", "STRING"), - schema.SchemaField("field2", "INTEGER"), - # Don't know 
what to convert UNKNOWN_TYPE to, let type inference work, - # instead. - schema.SchemaField("field3", "UNKNOWN_TYPE"), - ) - field = schema.SchemaField("ignored_name", "RECORD", mode="NULLABLE", fields=fields) - - with warnings.catch_warnings(record=True) as warned: - actual = module_under_test.bq_to_arrow_data_type(field) - - assert actual is None - assert len(warned) == 1 - warning = warned[0] - assert "field3" in str(warning) - - -@pytest.mark.parametrize( - "bq_type,rows", - [ - ("STRING", ["abc", None, "def", None]), - ("BYTES", [b"abc", None, b"def", None]), - ("INTEGER", [123, None, 456, None]), - ("INT64", [-9223372036854775808, None, 9223372036854775807, 123]), - ("FLOAT", [1.25, None, 3.5, None]), - ( - "NUMERIC", - [ - decimal.Decimal("-99999999999999999999999999999.999999999"), - None, - decimal.Decimal("99999999999999999999999999999.999999999"), - decimal.Decimal("999.123456789"), - ], - ), - ("BOOLEAN", [True, None, False, None]), - ("BOOL", [False, None, True, None]), - ( - "TIMESTAMP", - [ - datetime.datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.utc), - None, - datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.utc), - datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc), - ], - ), - ( - "DATE", - [ - datetime.date(1, 1, 1), - None, - datetime.date(9999, 12, 31), - datetime.date(1970, 1, 1), - ], - ), - ( - "TIME", - [ - datetime.time(0, 0, 0), - None, - datetime.time(23, 59, 59, 999999), - datetime.time(12, 0, 0), - ], - ), - ( - "DATETIME", - [ - datetime.datetime(1, 1, 1, 0, 0, 0), - datetime.datetime(9999, 12, 31, 23, 59, 59, 999999), - None, - datetime.datetime(1970, 1, 1, 0, 0, 0), - datetime.datetime(1999, 3, 14, 15, 9, 26, 535898), - ], - ), - ( - "GEOGRAPHY", - [ - "POINT(30 10)", - None, - "LINESTRING (30 10, 10 30, 40 40)", - "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", - ], - ), - ], -) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_array_w_nullable_scalars(module_under_test, bq_type, rows): - series = pandas.Series(rows, dtype="object") - bq_field = schema.SchemaField("field_name", bq_type) - arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) - roundtrip = arrow_array.to_pylist() - assert rows == roundtrip - - -@pytest.mark.parametrize( - "bq_type,rows", - [ - ( - "TIMESTAMP", - [ - "1971-09-28T23:59:07+00:00", - "1975-04-09T23:59:02+00:00", - "1979-08-17T23:59:05+00:00", - "NaT", - "1983-05-09T13:00:00+00:00", - ], - ), - ( - "DATETIME", - [ - "1971-09-28T23:59:07", - "1975-04-09T23:59:02", - "1979-08-17T23:59:05", - "NaT", - "1983-05-09T13:00:00", - ], - ), - ], -) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_array_w_pandas_timestamp(module_under_test, bq_type, rows): - rows = [pandas.Timestamp(row) for row in rows] - series = pandas.Series(rows) - bq_field = schema.SchemaField("field_name", bq_type) - arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) - roundtrip = arrow_array.to_pandas() - assert series.equals(roundtrip) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_array_w_arrays(module_under_test): - rows = [[1, 2, 3], [], [4, 5, 6]] - series = pandas.Series(rows, dtype="object") - bq_field = schema.SchemaField("field_name", "INTEGER", 
mode="REPEATED") - arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) - roundtrip = arrow_array.to_pylist() - assert rows == roundtrip - - -@pytest.mark.parametrize("bq_type", ["RECORD", "record", "STRUCT", "struct"]) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_array_w_structs(module_under_test, bq_type): - rows = [ - {"int_col": 123, "string_col": "abc"}, - None, - {"int_col": 456, "string_col": "def"}, - ] - series = pandas.Series(rows, dtype="object") - bq_field = schema.SchemaField( - "field_name", - bq_type, - fields=( - schema.SchemaField("int_col", "INTEGER"), - schema.SchemaField("string_col", "STRING"), - ), - ) - arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) - roundtrip = arrow_array.to_pylist() - assert rows == roundtrip - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_array_w_special_floats(module_under_test): - bq_field = schema.SchemaField("field_name", "FLOAT64") - rows = [float("-inf"), float("nan"), float("inf"), None] - series = pandas.Series(rows, dtype="object") - arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) - roundtrip = arrow_array.to_pylist() - assert len(rows) == len(roundtrip) - assert roundtrip[0] == float("-inf") - # Since we are converting from pandas, NaN is treated as NULL in pyarrow - # due to pandas conventions. - # https://arrow.apache.org/docs/python/data.html#none-values-and-nan-handling - assert roundtrip[1] is None - assert roundtrip[2] == float("inf") - assert roundtrip[3] is None - - -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_bq_to_arrow_schema_w_unknown_type(module_under_test): - fields = ( - schema.SchemaField("field1", "STRING"), - schema.SchemaField("field2", "INTEGER"), - # Don't know what to convert UNKNOWN_TYPE to, let type inference work, - # instead. 
- schema.SchemaField("field3", "UNKNOWN_TYPE"), - ) - actual = module_under_test.bq_to_arrow_schema(fields) - assert actual is None - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_not_found(module_under_test): - dataframe = pandas.DataFrame({"not_the_column_youre_looking_for": [1, 2, 3]}) - with pytest.raises(ValueError, match="col_is_missing"): - module_under_test.get_column_or_index(dataframe, "col_is_missing") - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_multiindex_not_found(module_under_test): - dataframe = pandas.DataFrame( - {"column_name": [1, 2, 3, 4, 5, 6]}, - index=pandas.MultiIndex.from_tuples( - [("a", 0), ("a", 1), ("b", 0), ("b", 1), ("c", 0), ("c", 1)] - ), - ) - with pytest.raises(ValueError, match="not_in_df"): - module_under_test.get_column_or_index(dataframe, "not_in_df") - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_both_prefers_column(module_under_test): - dataframe = pandas.DataFrame( - {"some_name": [1, 2, 3]}, index=pandas.Index([0, 1, 2], name="some_name") - ) - series = module_under_test.get_column_or_index(dataframe, "some_name") - expected = pandas.Series([1, 2, 3], name="some_name") - pandas.testing.assert_series_equal(series, expected) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_column(module_under_test): - dataframe = pandas.DataFrame({"column_name": [1, 2, 3], "other_column": [4, 5, 6]}) - series = module_under_test.get_column_or_index(dataframe, "column_name") - expected = pandas.Series([1, 2, 3], name="column_name") - pandas.testing.assert_series_equal(series, expected) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_named_index(module_under_test): - dataframe = pandas.DataFrame( - {"column_name": [1, 2, 3]}, index=pandas.Index([4, 5, 6], name="index_name") - ) - series = module_under_test.get_column_or_index(dataframe, "index_name") - expected = pandas.Series([4, 5, 6], name="index_name") - pandas.testing.assert_series_equal(series, expected) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_datetimeindex(module_under_test): - datetimes = [ - datetime.datetime(2000, 1, 2, 3, 4, 5, 101), - datetime.datetime(2006, 7, 8, 9, 10, 11, 202), - datetime.datetime(2012, 1, 14, 15, 16, 17, 303), - ] - dataframe = pandas.DataFrame( - {"column_name": [1, 2, 3]}, - index=pandas.DatetimeIndex(datetimes, name="index_name"), - ) - series = module_under_test.get_column_or_index(dataframe, "index_name") - expected = pandas.Series(datetimes, name="index_name") - pandas.testing.assert_series_equal(series, expected) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_get_column_or_index_with_multiindex(module_under_test): - dataframe = pandas.DataFrame( - {"column_name": [1, 2, 3, 4, 5, 6]}, - index=pandas.MultiIndex.from_tuples( - [("a", 0), ("a", 1), ("b", 0), ("b", 1), ("c", 0), ("c", 1)], - names=["letters", "numbers"], - ), - ) - - series = module_under_test.get_column_or_index(dataframe, "letters") - expected = pandas.Series(["a", "a", "b", "b", "c", "c"], name="letters") - pandas.testing.assert_series_equal(series, expected) - - series = module_under_test.get_column_or_index(dataframe, "numbers") - expected = pandas.Series([0, 1, 0, 1, 0, 1], name="numbers") - pandas.testing.assert_series_equal(series, expected) 
- - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_list_columns_and_indexes_without_named_index(module_under_test): - df_data = collections.OrderedDict( - [ - ("a_series", [1, 2, 3, 4]), - ("b_series", [0.1, 0.2, 0.3, 0.4]), - ("c_series", ["a", "b", "c", "d"]), - ] - ) - dataframe = pandas.DataFrame(df_data) - - columns_and_indexes = module_under_test.list_columns_and_indexes(dataframe) - expected = [ - ("a_series", pandas.api.types.pandas_dtype("int64")), - ("b_series", pandas.api.types.pandas_dtype("float64")), - ("c_series", pandas.api.types.pandas_dtype("object")), - ] - assert columns_and_indexes == expected - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_list_columns_and_indexes_with_named_index_same_as_column_name( - module_under_test, -): - df_data = collections.OrderedDict( - [ - ("a_series", [1, 2, 3, 4]), - ("b_series", [0.1, 0.2, 0.3, 0.4]), - ("c_series", ["a", "b", "c", "d"]), - ] - ) - dataframe = pandas.DataFrame( - df_data, - # Use same name as an integer column but a different datatype so that - # we can verify that the column is listed but the index isn't. - index=pandas.Index([0.1, 0.2, 0.3, 0.4], name="a_series"), - ) - - columns_and_indexes = module_under_test.list_columns_and_indexes(dataframe) - expected = [ - ("a_series", pandas.api.types.pandas_dtype("int64")), - ("b_series", pandas.api.types.pandas_dtype("float64")), - ("c_series", pandas.api.types.pandas_dtype("object")), - ] - assert columns_and_indexes == expected - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_list_columns_and_indexes_with_named_index(module_under_test): - df_data = collections.OrderedDict( - [ - ("a_series", [1, 2, 3, 4]), - ("b_series", [0.1, 0.2, 0.3, 0.4]), - ("c_series", ["a", "b", "c", "d"]), - ] - ) - dataframe = pandas.DataFrame( - df_data, index=pandas.Index([4, 5, 6, 7], name="a_index") - ) - - columns_and_indexes = module_under_test.list_columns_and_indexes(dataframe) - expected = [ - ("a_index", pandas.api.types.pandas_dtype("int64")), - ("a_series", pandas.api.types.pandas_dtype("int64")), - ("b_series", pandas.api.types.pandas_dtype("float64")), - ("c_series", pandas.api.types.pandas_dtype("object")), - ] - assert columns_and_indexes == expected - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_list_columns_and_indexes_with_multiindex(module_under_test): - df_data = collections.OrderedDict( - [ - ("a_series", [1, 2, 3, 4]), - ("b_series", [0.1, 0.2, 0.3, 0.4]), - ("c_series", ["a", "b", "c", "d"]), - ] - ) - dataframe = pandas.DataFrame( - df_data, - index=pandas.MultiIndex.from_tuples( - [(0, 0, 41), (0, 0, 42), (1, 0, 41), (1, 1, 41)], - names=[ - "a_index", - # Use same name as column, but different dtype so we can verify - # the column type is included. 
- "b_series", - "c_index", - ], - ), - ) - - columns_and_indexes = module_under_test.list_columns_and_indexes(dataframe) - expected = [ - ("a_index", pandas.api.types.pandas_dtype("int64")), - ("c_index", pandas.api.types.pandas_dtype("int64")), - ("a_series", pandas.api.types.pandas_dtype("int64")), - ("b_series", pandas.api.types.pandas_dtype("float64")), - ("c_series", pandas.api.types.pandas_dtype("object")), - ] - assert columns_and_indexes == expected - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_dataframe_to_bq_schema_dict_sequence(module_under_test): - df_data = collections.OrderedDict( - [ - ("str_column", [u"hello", u"world"]), - ("int_column", [42, 8]), - ("bool_column", [True, False]), - ] - ) - dataframe = pandas.DataFrame(df_data) - - dict_schema = [ - {"name": "str_column", "type": "STRING", "mode": "NULLABLE"}, - {"name": "bool_column", "type": "BOOL", "mode": "REQUIRED"}, - ] - - returned_schema = module_under_test.dataframe_to_bq_schema(dataframe, dict_schema) - - expected_schema = ( - schema.SchemaField("str_column", "STRING", "NULLABLE"), - schema.SchemaField("int_column", "INTEGER", "NULLABLE"), - schema.SchemaField("bool_column", "BOOL", "REQUIRED"), - ) - assert returned_schema == expected_schema - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_arrow_with_multiindex(module_under_test): - bq_schema = ( - schema.SchemaField("str_index", "STRING"), - # int_index is intentionally omitted, to verify that it's okay to be - # missing indexes from the schema. - schema.SchemaField("dt_index", "DATETIME"), - schema.SchemaField("int_col", "INTEGER"), - schema.SchemaField("nullable_int_col", "INTEGER"), - schema.SchemaField("str_col", "STRING"), - ) - df_data = collections.OrderedDict( - [ - ("int_col", [1, 2, 3, 4, 5, 6]), - ("nullable_int_col", [6.0, float("nan"), 7.0, float("nan"), 8.0, 9.0]), - ("str_col", ["apple", "banana", "cherry", "durian", "etrog", "fig"]), - ] - ) - df_index = pandas.MultiIndex.from_tuples( - [ - ("a", 0, datetime.datetime(1999, 12, 31, 23, 59, 59, 999999)), - ("a", 0, datetime.datetime(2000, 1, 1, 0, 0, 0)), - ("a", 1, datetime.datetime(1999, 12, 31, 23, 59, 59, 999999)), - ("b", 1, datetime.datetime(2000, 1, 1, 0, 0, 0)), - ("b", 0, datetime.datetime(1999, 12, 31, 23, 59, 59, 999999)), - ("b", 0, datetime.datetime(2000, 1, 1, 0, 0, 0)), - ], - names=["str_index", "int_index", "dt_index"], - ) - dataframe = pandas.DataFrame(df_data, index=df_index) - - arrow_table = module_under_test.dataframe_to_arrow(dataframe, bq_schema) - - assert arrow_table.schema.names == [ - "str_index", - "dt_index", - "int_col", - "nullable_int_col", - "str_col", - ] - arrow_data = arrow_table.to_pydict() - assert arrow_data["str_index"] == ["a", "a", "a", "b", "b", "b"] - expected_dt_index = [ - pandas.Timestamp(dt) - for dt in ( - datetime.datetime(1999, 12, 31, 23, 59, 59, 999999), - datetime.datetime(2000, 1, 1, 0, 0, 0), - datetime.datetime(1999, 12, 31, 23, 59, 59, 999999), - datetime.datetime(2000, 1, 1, 0, 0, 0), - datetime.datetime(1999, 12, 31, 23, 59, 59, 999999), - datetime.datetime(2000, 1, 1, 0, 0, 0), - ) - ] - assert arrow_data["dt_index"] == expected_dt_index - assert arrow_data["int_col"] == [1, 2, 3, 4, 5, 6] - assert arrow_data["nullable_int_col"] == [6, None, 7, None, 8, 9] - assert arrow_data["str_col"] == [ - "apple", - "banana", - "cherry", - "durian", - "etrog", - "fig", - ] - - 
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_arrow_with_required_fields(module_under_test): - bq_schema = ( - schema.SchemaField("field01", "STRING", mode="REQUIRED"), - schema.SchemaField("field02", "BYTES", mode="REQUIRED"), - schema.SchemaField("field03", "INTEGER", mode="REQUIRED"), - schema.SchemaField("field04", "INT64", mode="REQUIRED"), - schema.SchemaField("field05", "FLOAT", mode="REQUIRED"), - schema.SchemaField("field06", "FLOAT64", mode="REQUIRED"), - schema.SchemaField("field07", "NUMERIC", mode="REQUIRED"), - schema.SchemaField("field08", "BOOLEAN", mode="REQUIRED"), - schema.SchemaField("field09", "BOOL", mode="REQUIRED"), - schema.SchemaField("field10", "TIMESTAMP", mode="REQUIRED"), - schema.SchemaField("field11", "DATE", mode="REQUIRED"), - schema.SchemaField("field12", "TIME", mode="REQUIRED"), - schema.SchemaField("field13", "DATETIME", mode="REQUIRED"), - schema.SchemaField("field14", "GEOGRAPHY", mode="REQUIRED"), - ) - dataframe = pandas.DataFrame( - { - "field01": ["hello", "world"], - "field02": [b"abd", b"efg"], - "field03": [1, 2], - "field04": [3, 4], - "field05": [1.25, 9.75], - "field06": [-1.75, -3.5], - "field07": [decimal.Decimal("1.2345"), decimal.Decimal("6.7891")], - "field08": [True, False], - "field09": [False, True], - "field10": [ - datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc), - datetime.datetime(2012, 12, 21, 9, 7, 42, tzinfo=pytz.utc), - ], - "field11": [datetime.date(9999, 12, 31), datetime.date(1970, 1, 1)], - "field12": [datetime.time(23, 59, 59, 999999), datetime.time(12, 0, 0)], - "field13": [ - datetime.datetime(1970, 1, 1, 0, 0, 0), - datetime.datetime(2012, 12, 21, 9, 7, 42), - ], - "field14": [ - "POINT(30 10)", - "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", - ], - } - ) - - arrow_table = module_under_test.dataframe_to_arrow(dataframe, bq_schema) - arrow_schema = arrow_table.schema - - assert len(arrow_schema) == len(bq_schema) - for arrow_field in arrow_schema: - assert not arrow_field.nullable - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_arrow_with_unknown_type(module_under_test): - bq_schema = ( - schema.SchemaField("field00", "UNKNOWN_TYPE"), - schema.SchemaField("field01", "STRING"), - schema.SchemaField("field02", "BYTES"), - schema.SchemaField("field03", "INTEGER"), - ) - dataframe = pandas.DataFrame( - { - "field00": ["whoami", "whatami"], - "field01": ["hello", "world"], - "field02": [b"abd", b"efg"], - "field03": [1, 2], - } - ) - - with warnings.catch_warnings(record=True) as warned: - arrow_table = module_under_test.dataframe_to_arrow(dataframe, bq_schema) - arrow_schema = arrow_table.schema - - assert len(warned) == 1 - warning = warned[0] - assert "field00" in str(warning) - - assert len(arrow_schema) == len(bq_schema) - assert arrow_schema[0].name == "field00" - assert arrow_schema[1].name == "field01" - assert arrow_schema[2].name == "field02" - assert arrow_schema[3].name == "field03" - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_arrow_dict_sequence_schema(module_under_test): - dict_schema = [ - {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, - {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, - ] - - dataframe = 
pandas.DataFrame( - {"field01": [u"hello", u"world"], "field02": [True, False]} - ) - - arrow_table = module_under_test.dataframe_to_arrow(dataframe, dict_schema) - arrow_schema = arrow_table.schema - - expected_fields = [ - pyarrow.field("field01", "string", nullable=False), - pyarrow.field("field02", "bool", nullable=True), - ] - assert list(arrow_schema) == expected_fields - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_dataframe_to_parquet_without_pyarrow(module_under_test, monkeypatch): - monkeypatch.setattr(module_under_test, "pyarrow", None) - with pytest.raises(ValueError) as exc_context: - module_under_test.dataframe_to_parquet(pandas.DataFrame(), (), None) - assert "pyarrow is required" in str(exc_context.value) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_parquet_w_extra_fields(module_under_test, monkeypatch): - with pytest.raises(ValueError) as exc_context: - module_under_test.dataframe_to_parquet( - pandas.DataFrame(), (schema.SchemaField("not_in_df", "STRING"),), None - ) - message = str(exc_context.value) - assert "bq_schema contains fields not present in dataframe" in message - assert "not_in_df" in message - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_parquet_w_missing_fields(module_under_test, monkeypatch): - with pytest.raises(ValueError) as exc_context: - module_under_test.dataframe_to_parquet( - pandas.DataFrame({"not_in_bq": [1, 2, 3]}), (), None - ) - message = str(exc_context.value) - assert "bq_schema is missing fields from dataframe" in message - assert "not_in_bq" in message - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_parquet_compression_method(module_under_test): - bq_schema = (schema.SchemaField("field00", "STRING"),) - dataframe = pandas.DataFrame({"field00": ["foo", "bar"]}) - - write_table_patch = mock.patch.object( - module_under_test.pyarrow.parquet, "write_table", autospec=True - ) - - with write_table_patch as fake_write_table: - module_under_test.dataframe_to_parquet( - dataframe, bq_schema, None, parquet_compression="ZSTD" - ) - - call_args = fake_write_table.call_args - assert call_args is not None - assert call_args.kwargs.get("compression") == "ZSTD" - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_dataframe_to_bq_schema_fallback_needed_wo_pyarrow(module_under_test): - dataframe = pandas.DataFrame( - data=[ - {"id": 10, "status": u"FOO", "execution_date": datetime.date(2019, 5, 10)}, - {"id": 20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)}, - ] - ) - - no_pyarrow_patch = mock.patch(module_under_test.__name__ + ".pyarrow", None) - - with no_pyarrow_patch, warnings.catch_warnings(record=True) as warned: - detected_schema = module_under_test.dataframe_to_bq_schema( - dataframe, bq_schema=[] - ) - - assert detected_schema is None - - # a warning should also be issued - expected_warnings = [ - warning for warning in warned if "could not determine" in str(warning).lower() - ] - assert len(expected_warnings) == 1 - msg = str(expected_warnings[0]) - assert "execution_date" in msg and "created_at" in msg - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") 
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_dataframe_to_bq_schema_fallback_needed_w_pyarrow(module_under_test):
-    dataframe = pandas.DataFrame(
-        data=[
-            {"id": 10, "status": u"FOO", "created_at": datetime.date(2019, 5, 10)},
-            {"id": 20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)},
-        ]
-    )
-
-    with warnings.catch_warnings(record=True) as warned:
-        detected_schema = module_under_test.dataframe_to_bq_schema(
-            dataframe, bq_schema=[]
-        )
-
-    expected_schema = (
-        schema.SchemaField("id", "INTEGER", mode="NULLABLE"),
-        schema.SchemaField("status", "STRING", mode="NULLABLE"),
-        schema.SchemaField("created_at", "DATE", mode="NULLABLE"),
-    )
-    by_name = operator.attrgetter("name")
-    assert sorted(detected_schema, key=by_name) == sorted(expected_schema, key=by_name)
-
-    # there should be no relevant warnings
-    unwanted_warnings = [
-        warning for warning in warned if "could not determine" in str(warning).lower()
-    ]
-    assert not unwanted_warnings
-
-
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_dataframe_to_bq_schema_pyarrow_fallback_fails(module_under_test):
-    dataframe = pandas.DataFrame(
-        data=[
-            {"struct_field": {"one": 2}, "status": u"FOO"},
-            {"struct_field": {"two": u"222"}, "status": u"BAR"},
-        ]
-    )
-
-    with warnings.catch_warnings(record=True) as warned:
-        detected_schema = module_under_test.dataframe_to_bq_schema(
-            dataframe, bq_schema=[]
-        )
-
-    assert detected_schema is None
-
-    # a warning should also be issued
-    expected_warnings = [
-        warning for warning in warned if "could not determine" in str(warning).lower()
-    ]
-    assert len(expected_warnings) == 1
-    assert "struct_field" in str(expected_warnings[0])
-
-
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_augment_schema_type_detection_succeeds(module_under_test):
-    dataframe = pandas.DataFrame(
-        data=[
-            {
-                "bool_field": False,
-                "int_field": 123,
-                "float_field": 3.141592,
-                "time_field": datetime.time(17, 59, 47),
-                "timestamp_field": datetime.datetime(2005, 5, 31, 14, 25, 55),
-                "date_field": datetime.date(2005, 5, 31),
-                "bytes_field": b"some bytes",
-                "string_field": u"some characters",
-                "numeric_field": decimal.Decimal("123.456"),
-            }
-        ]
-    )
-
-    # NOTE: In a pandas DataFrame, the dtype of Python's datetime instances is
-    # set to "datetime64[ns]", and pyarrow converts that to pyarrow.TimestampArray.
-    # We thus cannot expect to get a DATETIME type when converting back to the
-    # BigQuery type.
- - current_schema = ( - schema.SchemaField("bool_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("int_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("float_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("time_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("timestamp_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("date_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("bytes_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("string_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("numeric_field", field_type=None, mode="NULLABLE"), - ) - - with warnings.catch_warnings(record=True) as warned: - augmented_schema = module_under_test.augment_schema(dataframe, current_schema) - - # there should be no relevant warnings - unwanted_warnings = [ - warning for warning in warned if "Pyarrow could not" in str(warning) - ] - assert not unwanted_warnings - - # the augmented schema must match the expected - expected_schema = ( - schema.SchemaField("bool_field", field_type="BOOL", mode="NULLABLE"), - schema.SchemaField("int_field", field_type="INT64", mode="NULLABLE"), - schema.SchemaField("float_field", field_type="FLOAT64", mode="NULLABLE"), - schema.SchemaField("time_field", field_type="TIME", mode="NULLABLE"), - schema.SchemaField("timestamp_field", field_type="TIMESTAMP", mode="NULLABLE"), - schema.SchemaField("date_field", field_type="DATE", mode="NULLABLE"), - schema.SchemaField("bytes_field", field_type="BYTES", mode="NULLABLE"), - schema.SchemaField("string_field", field_type="STRING", mode="NULLABLE"), - schema.SchemaField("numeric_field", field_type="NUMERIC", mode="NULLABLE"), - ) - by_name = operator.attrgetter("name") - assert sorted(augmented_schema, key=by_name) == sorted(expected_schema, key=by_name) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_augment_schema_type_detection_fails(module_under_test): - dataframe = pandas.DataFrame( - data=[ - { - "status": u"FOO", - "struct_field": {"one": 1}, - "struct_field_2": {"foo": u"123"}, - }, - { - "status": u"BAR", - "struct_field": {"two": u"111"}, - "struct_field_2": {"bar": 27}, - }, - ] - ) - current_schema = [ - schema.SchemaField("status", field_type="STRING", mode="NULLABLE"), - schema.SchemaField("struct_field", field_type=None, mode="NULLABLE"), - schema.SchemaField("struct_field_2", field_type=None, mode="NULLABLE"), - ] - - with warnings.catch_warnings(record=True) as warned: - augmented_schema = module_under_test.augment_schema(dataframe, current_schema) - - assert augmented_schema is None - - expected_warnings = [ - warning for warning in warned if "could not determine" in str(warning) - ] - assert len(expected_warnings) == 1 - warning_msg = str(expected_warnings[0]) - assert "pyarrow" in warning_msg.lower() - assert "struct_field" in warning_msg and "struct_field_2" in warning_msg - - -@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") -def test_dataframe_to_parquet_dict_sequence_schema(module_under_test): - dict_schema = [ - {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, - {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, - ] - - dataframe = pandas.DataFrame( - {"field01": [u"hello", u"world"], "field02": [True, False]} - ) - - write_table_patch = mock.patch.object( - module_under_test.pyarrow.parquet, "write_table", autospec=True - ) - 
    to_arrow_patch = mock.patch.object(
-        module_under_test, "dataframe_to_arrow", autospec=True
-    )
-
-    with write_table_patch, to_arrow_patch as fake_to_arrow:
-        module_under_test.dataframe_to_parquet(dataframe, dict_schema, None)
-
-    expected_schema_arg = [
-        schema.SchemaField("field01", "STRING", mode="REQUIRED"),
-        schema.SchemaField("field02", "BOOL", mode="NULLABLE"),
-    ]
-    schema_arg = fake_to_arrow.call_args.args[1]
-    assert schema_arg == expected_schema_arg
-
-
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_download_arrow_tabledata_list_unknown_field_type(module_under_test):
-    fake_page = api_core.page_iterator.Page(
-        parent=mock.Mock(),
-        items=[{"page_data": "foo"}],
-        item_to_value=api_core.page_iterator._item_to_value_identity,
-    )
-    fake_page._columns = [[1, 10, 100], [2.2, 22.22, 222.222]]
-    pages = [fake_page]
-
-    bq_schema = [
-        schema.SchemaField("population_size", "INTEGER"),
-        schema.SchemaField("alien_field", "ALIEN_FLOAT_TYPE"),
-    ]
-
-    results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema)
-
-    with warnings.catch_warnings(record=True) as warned:
-        result = next(results_gen)
-
-    unwanted_warnings = [
-        warning
-        for warning in warned
-        if "please pass schema= explicitly" in str(warning).lower()
-    ]
-    assert not unwanted_warnings
-
-    assert len(result.columns) == 2
-    col = result.columns[0]
-    assert type(col) is pyarrow.lib.Int64Array
-    assert list(col) == [1, 10, 100]
-    col = result.columns[1]
-    assert type(col) is pyarrow.lib.DoubleArray
-    assert list(col) == [2.2, 22.22, 222.222]
-
-
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_download_arrow_tabledata_list_known_field_type(module_under_test):
-    fake_page = api_core.page_iterator.Page(
-        parent=mock.Mock(),
-        items=[{"page_data": "foo"}],
-        item_to_value=api_core.page_iterator._item_to_value_identity,
-    )
-    fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]]
-    pages = [fake_page]
-
-    bq_schema = [
-        schema.SchemaField("population_size", "INTEGER"),
-        schema.SchemaField("non_alien_field", "STRING"),
-    ]
-
-    results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema)
-    with warnings.catch_warnings(record=True) as warned:
-        result = next(results_gen)
-
-    unwanted_warnings = [
-        warning
-        for warning in warned
-        if "please pass schema= explicitly" in str(warning).lower()
-    ]
-    assert not unwanted_warnings
-
-    assert len(result.columns) == 2
-    col = result.columns[0]
-    assert type(col) is pyarrow.lib.Int64Array
-    assert list(col) == [1, 10, 100]
-    col = result.columns[1]
-    assert type(col) is pyarrow.lib.StringArray
-    assert list(col) == ["2.2", "22.22", "222.222"]
-
-
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_download_arrow_tabledata_list_dict_sequence_schema(module_under_test):
-    fake_page = api_core.page_iterator.Page(
-        parent=mock.Mock(),
-        items=[{"page_data": "foo"}],
-        item_to_value=api_core.page_iterator._item_to_value_identity,
-    )
-    fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]]
-    pages = [fake_page]
-
-    dict_schema = [
-        {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"},
-        {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"},
-    ]
-
-    results_gen = module_under_test.download_arrow_tabledata_list(pages, dict_schema)
-    result = next(results_gen)
-
-    assert len(result.columns) == 2
-    col = result.columns[0]
-    assert type(col) is pyarrow.lib.Int64Array
-    assert list(col) == [1, 10, 100]
-    col = result.columns[1]
-    assert type(col) is pyarrow.lib.StringArray
-    assert list(col) == ["2.2", "22.22", "222.222"]
-
-
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
-def test_download_dataframe_tabledata_list_dict_sequence_schema(module_under_test):
-    fake_page = api_core.page_iterator.Page(
-        parent=mock.Mock(),
-        items=[{"page_data": "foo"}],
-        item_to_value=api_core.page_iterator._item_to_value_identity,
-    )
-    fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]]
-    pages = [fake_page]
-
-    dict_schema = [
-        {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"},
-        {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"},
-    ]
-
-    results_gen = module_under_test.download_dataframe_tabledata_list(
-        pages, dict_schema, dtypes={}
-    )
-    result = next(results_gen)
-
-    expected_result = pandas.DataFrame(
-        collections.OrderedDict(
-            [
-                ("population_size", [1, 10, 100]),
-                ("non_alien_field", ["2.2", "22.22", "222.222"]),
-            ]
-        )
-    )
-    assert result.equals(expected_result)
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
deleted file mode 100644
index 2227183a9236..000000000000
--- a/bigquery/tests/unit/test_client.py
+++ /dev/null
@@ -1,7399 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
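[Editor's note] The try/except (ImportError, AttributeError) guards that open this deleted module, together with the pytest.mark.skipif markers used above, let the suite run even when optional extras such as pandas, pyarrow, or fastparquet are absent. A minimal sketch of the same pattern; the dependency name "some_optional_dep" is a placeholder, not anything from this patch:

    import pytest

    try:
        import some_optional_dep  # stands in for pandas / pyarrow / fastparquet
    except (ImportError, AttributeError):  # pragma: NO COVER
        some_optional_dep = None

    @pytest.mark.skipif(some_optional_dep is None, reason="Requires `some_optional_dep`")
    def test_needs_optional_dep():
        # Runs only when the optional import above succeeded.
        assert some_optional_dep is not None

The `isinstance(pyarrow, mock.Mock)` variant seen in the skipif markers above covers suites whose conftest substitutes a mock object for a missing module instead of None.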
- -import copy -import collections -import datetime -import decimal -import email -import gzip -import io -import json -import operator -import unittest -import warnings - -import freezegun -import mock -import requests -import six -from six.moves import http_client -import pytest -import pytz - -try: - import fastparquet -except (ImportError, AttributeError): # pragma: NO COVER - fastparquet = None -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None -try: - import pyarrow -except (ImportError, AttributeError): # pragma: NO COVER - pyarrow = None - -import google.api_core.exceptions -from google.api_core.gapic_v1 import client_info -import google.cloud._helpers -from google.cloud import bigquery_v2 -from google.cloud.bigquery.dataset import DatasetReference - -try: - from google.cloud import bigquery_storage_v1beta1 -except (ImportError, AttributeError): # pragma: NO COVER - bigquery_storage_v1beta1 = None -from tests.unit.helpers import make_connection - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_list_partitons_meta_info(project, dataset_id, table_id, num_rows=0): - return { - "tableReference": { - "projectId": project, - "datasetId": dataset_id, - "tableId": "{}$__PARTITIONS_SUMMARY__".format(table_id), - }, - "schema": { - "fields": [ - {"name": "project_id", "type": "STRING", "mode": "NULLABLE"}, - {"name": "dataset_id", "type": "STRING", "mode": "NULLABLE"}, - {"name": "table_id", "type": "STRING", "mode": "NULLABLE"}, - {"name": "partition_id", "type": "STRING", "mode": "NULLABLE"}, - ] - }, - "etag": "ETAG", - "numRows": num_rows, - } - - -class TestClient(unittest.TestCase): - - PROJECT = "PROJECT" - DS_ID = "DATASET_ID" - TABLE_ID = "TABLE_ID" - MODEL_ID = "MODEL_ID" - TABLE_REF = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" - LOCATION = "us-central" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _make_table_resource(self): - return { - "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - } - - def test_ctor_defaults(self): - from google.cloud.bigquery._http import Connection - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - self.assertIsInstance(client._connection, Connection) - self.assertIs(client._connection.credentials, creds) - self.assertIs(client._connection.http, http) - self.assertIsNone(client.location) - self.assertEqual( - client._connection.API_BASE_URL, Connection.DEFAULT_API_ENDPOINT - ) - - def test_ctor_w_empty_client_options(self): - from google.api_core.client_options import ClientOptions - - creds = _make_credentials() - http = object() - client_options = ClientOptions() - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - client_options=client_options, - ) - self.assertEqual( - client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT - ) - - def test_ctor_w_client_options_dict(self): - - creds = _make_credentials() - http = object() - client_options = {"api_endpoint": "https://www.foo-googleapis.com"} - client = self._make_one( - 
project=self.PROJECT, - credentials=creds, - _http=http, - client_options=client_options, - ) - self.assertEqual( - client._connection.API_BASE_URL, "https://www.foo-googleapis.com" - ) - - def test_ctor_w_client_options_object(self): - from google.api_core.client_options import ClientOptions - - creds = _make_credentials() - http = object() - client_options = ClientOptions(api_endpoint="https://www.foo-googleapis.com") - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - client_options=client_options, - ) - self.assertEqual( - client._connection.API_BASE_URL, "https://www.foo-googleapis.com" - ) - - def test_ctor_w_location(self): - from google.cloud.bigquery._http import Connection - - creds = _make_credentials() - http = object() - location = "us-central" - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, location=location - ) - self.assertIsInstance(client._connection, Connection) - self.assertIs(client._connection.credentials, creds) - self.assertIs(client._connection.http, http) - self.assertEqual(client.location, location) - - def test_ctor_w_query_job_config(self): - from google.cloud.bigquery._http import Connection - from google.cloud.bigquery import QueryJobConfig - - creds = _make_credentials() - http = object() - location = "us-central" - job_config = QueryJobConfig() - job_config.dry_run = True - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - location=location, - default_query_job_config=job_config, - ) - self.assertIsInstance(client._connection, Connection) - self.assertIs(client._connection.credentials, creds) - self.assertIs(client._connection.http, http) - self.assertEqual(client.location, location) - - self.assertIsInstance(client._default_query_job_config, QueryJobConfig) - self.assertTrue(client._default_query_job_config.dry_run) - - def test__call_api_applying_custom_retry_on_timeout(self): - from concurrent.futures import TimeoutError - from google.cloud.bigquery.retry import DEFAULT_RETRY - - client = self._make_one() - - api_request_patcher = mock.patch.object( - client._connection, "api_request", side_effect=[TimeoutError, "result"], - ) - retry = DEFAULT_RETRY.with_deadline(1).with_predicate( - lambda exc: isinstance(exc, TimeoutError) - ) - - with api_request_patcher as fake_api_request: - result = client._call_api(retry, foo="bar") - - self.assertEqual(result, "result") - self.assertEqual( - fake_api_request.call_args_list, - [mock.call(foo="bar"), mock.call(foo="bar")], # was retried once - ) - - def test__get_query_results_miss_w_explicit_project_and_timeout(self): - from google.cloud.exceptions import NotFound - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection() - - with self.assertRaises(NotFound): - client._get_query_results( - "nothere", - None, - project="other-project", - location=self.LOCATION, - timeout_ms=500, - timeout=42, - ) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/other-project/queries/nothere", - query_params={"maxResults": 0, "timeoutMs": 500, "location": self.LOCATION}, - timeout=42, - ) - - def test__get_query_results_miss_w_client_location(self): - from google.cloud.exceptions import NotFound - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds, location=self.LOCATION) - conn = client._connection = make_connection() - - with self.assertRaises(NotFound): - client._get_query_results("nothere", None) - - 
conn.api_request.assert_called_once_with( - method="GET", - path="/projects/PROJECT/queries/nothere", - query_params={"maxResults": 0, "location": self.LOCATION}, - timeout=None, - ) - - def test__get_query_results_hit(self): - job_id = "query_job" - data = { - "kind": "bigquery#getQueryResultsResponse", - "etag": "some-tag", - "schema": { - "fields": [ - {"name": "title", "type": "STRING", "mode": "NULLABLE"}, - {"name": "unique_words", "type": "INTEGER", "mode": "NULLABLE"}, - ] - }, - "jobReference": {"projectId": self.PROJECT, "jobId": job_id}, - "totalRows": "10", - "totalBytesProcessed": "2464625", - "jobComplete": True, - "cacheHit": False, - } - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - client._connection = make_connection(data) - query_results = client._get_query_results(job_id, None) - - self.assertEqual(query_results.total_rows, 10) - self.assertTrue(query_results.complete) - - def test_get_service_account_email(self): - path = "/projects/%s/serviceAccount" % (self.PROJECT,) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - email = "bq-123@bigquery-encryption.iam.gserviceaccount.com" - resource = {"kind": "bigquery#getServiceAccountResponse", "email": email} - conn = client._connection = make_connection(resource) - - service_account_email = client.get_service_account_email(timeout=7.5) - - conn.api_request.assert_called_once_with(method="GET", path=path, timeout=7.5) - self.assertEqual(service_account_email, email) - - def test_get_service_account_email_w_alternate_project(self): - project = "my-alternate-project" - path = "/projects/%s/serviceAccount" % (project,) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - email = "bq-123@bigquery-encryption.iam.gserviceaccount.com" - resource = {"kind": "bigquery#getServiceAccountResponse", "email": email} - conn = client._connection = make_connection(resource) - - service_account_email = client.get_service_account_email(project=project) - - conn.api_request.assert_called_once_with(method="GET", path=path, timeout=None) - self.assertEqual(service_account_email, email) - - def test_get_service_account_email_w_custom_retry(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - api_path = "/projects/{}/serviceAccount".format(self.PROJECT) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - - resource = { - "kind": "bigquery#getServiceAccountResponse", - "email": "bq-123@bigquery-encryption.iam.gserviceaccount.com", - } - api_request_patcher = mock.patch.object( - client._connection, "api_request", side_effect=[ValueError, resource], - ) - - retry = DEFAULT_RETRY.with_deadline(1).with_predicate( - lambda exc: isinstance(exc, ValueError) - ) - - with api_request_patcher as fake_api_request: - service_account_email = client.get_service_account_email( - retry=retry, timeout=7.5 - ) - - self.assertEqual( - service_account_email, "bq-123@bigquery-encryption.iam.gserviceaccount.com" - ) - self.assertEqual( - fake_api_request.call_args_list, - [ - mock.call(method="GET", path=api_path, timeout=7.5), - mock.call(method="GET", path=api_path, timeout=7.5), # was retried once - ], - ) - - def test_list_projects_defaults(self): - from google.cloud.bigquery.client import Project - - PROJECT_1 = "PROJECT_ONE" - PROJECT_2 = "PROJECT_TWO" - TOKEN = "TOKEN" - DATA = { - 
"nextPageToken": TOKEN, - "projects": [ - { - "kind": "bigquery#project", - "id": PROJECT_1, - "numericId": 1, - "projectReference": {"projectId": PROJECT_1}, - "friendlyName": "One", - }, - { - "kind": "bigquery#project", - "id": PROJECT_2, - "numericId": 2, - "projectReference": {"projectId": PROJECT_2}, - "friendlyName": "Two", - }, - ], - } - creds = _make_credentials() - client = self._make_one(PROJECT_1, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_projects() - page = six.next(iterator.pages) - projects = list(page) - token = iterator.next_page_token - - self.assertEqual(len(projects), len(DATA["projects"])) - for found, expected in zip(projects, DATA["projects"]): - self.assertIsInstance(found, Project) - self.assertEqual(found.project_id, expected["id"]) - self.assertEqual(found.numeric_id, expected["numericId"]) - self.assertEqual(found.friendly_name, expected["friendlyName"]) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/projects", query_params={}, timeout=None - ) - - def test_list_projects_w_timeout(self): - PROJECT_1 = "PROJECT_ONE" - TOKEN = "TOKEN" - DATA = { - "nextPageToken": TOKEN, - "projects": [], - } - creds = _make_credentials() - client = self._make_one(PROJECT_1, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_projects(timeout=7.5) - six.next(iterator.pages) - - conn.api_request.assert_called_once_with( - method="GET", path="/projects", query_params={}, timeout=7.5 - ) - - def test_list_projects_explicit_response_missing_projects_key(self): - TOKEN = "TOKEN" - DATA = {} - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_projects(max_results=3, page_token=TOKEN) - page = six.next(iterator.pages) - projects = list(page) - token = iterator.next_page_token - - self.assertEqual(len(projects), 0) - self.assertIsNone(token) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects", - query_params={"maxResults": 3, "pageToken": TOKEN}, - timeout=None, - ) - - def test_list_datasets_defaults(self): - from google.cloud.bigquery.dataset import DatasetListItem - - DATASET_1 = "dataset_one" - DATASET_2 = "dataset_two" - PATH = "projects/%s/datasets" % self.PROJECT - TOKEN = "TOKEN" - DATA = { - "nextPageToken": TOKEN, - "datasets": [ - { - "kind": "bigquery#dataset", - "id": "%s:%s" % (self.PROJECT, DATASET_1), - "datasetReference": { - "datasetId": DATASET_1, - "projectId": self.PROJECT, - }, - "friendlyName": None, - }, - { - "kind": "bigquery#dataset", - "id": "%s:%s" % (self.PROJECT, DATASET_2), - "datasetReference": { - "datasetId": DATASET_2, - "projectId": self.PROJECT, - }, - "friendlyName": "Two", - }, - ], - } - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_datasets() - page = six.next(iterator.pages) - datasets = list(page) - token = iterator.next_page_token - - self.assertEqual(len(datasets), len(DATA["datasets"])) - for found, expected in zip(datasets, DATA["datasets"]): - self.assertIsInstance(found, DatasetListItem) - self.assertEqual(found.full_dataset_id, expected["id"]) - self.assertEqual(found.friendly_name, expected["friendlyName"]) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % PATH, query_params={}, timeout=None - ) - - def 
test_list_datasets_w_project_and_timeout(self): - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection({}) - - list(client.list_datasets(project="other-project", timeout=7.5)) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/other-project/datasets", - query_params={}, - timeout=7.5, - ) - - def test_list_datasets_explicit_response_missing_datasets_key(self): - PATH = "projects/%s/datasets" % self.PROJECT - TOKEN = "TOKEN" - FILTER = "FILTER" - DATA = {} - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_datasets( - include_all=True, filter=FILTER, max_results=3, page_token=TOKEN - ) - page = six.next(iterator.pages) - datasets = list(page) - token = iterator.next_page_token - - self.assertEqual(len(datasets), 0) - self.assertIsNone(token) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={ - "all": True, - "filter": FILTER, - "maxResults": 3, - "pageToken": TOKEN, - }, - timeout=None, - ) - - def test_dataset_with_specified_project(self): - from google.cloud.bigquery.dataset import DatasetReference - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - catch_warnings = warnings.catch_warnings(record=True) - - with catch_warnings as warned: - dataset = client.dataset(self.DS_ID, self.PROJECT) - - matches = [ - warning - for warning in warned - if warning.category in (DeprecationWarning, PendingDeprecationWarning) - and "Client.dataset" in str(warning) - and "my_project.my_dataset" in str(warning) - ] - assert matches, "A Client.dataset deprecation warning was not raised." - self.assertIsInstance(dataset, DatasetReference) - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - - def test_dataset_with_default_project(self): - from google.cloud.bigquery.dataset import DatasetReference - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - catch_warnings = warnings.catch_warnings(record=True) - - with catch_warnings as warned: - dataset = client.dataset(self.DS_ID) - - matches = [ - warning - for warning in warned - if warning.category in (DeprecationWarning, PendingDeprecationWarning) - and "Client.dataset" in str(warning) - and "my_project.my_dataset" in str(warning) - ] - assert matches, "A Client.dataset deprecation warning was not raised." - self.assertIsInstance(dataset, DatasetReference) - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - - def test_get_dataset(self): - from google.cloud.exceptions import ServerError - - path = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - resource = { - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - } - conn = client._connection = make_connection(resource) - dataset_ref = DatasetReference(self.PROJECT, self.DS_ID) - - dataset = client.get_dataset(dataset_ref, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % path, timeout=7.5 - ) - self.assertEqual(dataset.dataset_id, self.DS_ID) - - # Test retry. 
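[Editor's note] The retry cases this comment introduces pin down which failures get_dataset retries: only Google API errors carrying a retryable reason such as "backendError", and only while a retry object is in effect. The same composition can be sketched with google.api_core.retry alone; the flaky callable and its counter are invented for illustration:

    from google.api_core import exceptions, retry

    calls = {"n": 0}

    def flaky():
        # Fail once with a retryable-looking server error, then succeed.
        calls["n"] += 1
        if calls["n"] == 1:
            raise exceptions.InternalServerError(
                "transient", errors=[{"reason": "backendError"}]
            )
        return "ok"

    wrapped = retry.Retry(
        predicate=retry.if_exception_type(exceptions.InternalServerError),
        deadline=10,
    )(flaky)

    assert wrapped() == "ok"
    assert calls["n"] == 2  # one failure, one successful retry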
- - # Not a cloud API exception (missing 'errors' field). - client._connection = make_connection(Exception(""), resource) - with self.assertRaises(Exception): - client.get_dataset(dataset_ref) - - # Zero-length errors field. - client._connection = make_connection(ServerError(""), resource) - with self.assertRaises(ServerError): - client.get_dataset(dataset_ref) - - # Non-retryable reason. - client._connection = make_connection( - ServerError("", errors=[{"reason": "serious"}]), resource - ) - with self.assertRaises(ServerError): - client.get_dataset(dataset_ref) - - # Retryable reason, but retry is disabled. - client._connection = make_connection( - ServerError("", errors=[{"reason": "backendError"}]), resource - ) - with self.assertRaises(ServerError): - client.get_dataset(dataset_ref, retry=None) - - # Retryable reason, default retry: success. - client._connection = make_connection( - ServerError("", errors=[{"reason": "backendError"}]), resource - ) - dataset = client.get_dataset( - # Test with a string for dataset ID. - dataset_ref.dataset_id - ) - self.assertEqual(dataset.dataset_id, self.DS_ID) - - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_create_bqstorage_client(self): - mock_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - mock_client_instance = object() - mock_client.return_value = mock_client_instance - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - - with mock.patch( - "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", mock_client - ): - bqstorage_client = client._create_bqstorage_client() - - self.assertIs(bqstorage_client, mock_client_instance) - mock_client.assert_called_once_with(credentials=creds) - - def test_create_dataset_minimal(self): - from google.cloud.bigquery.dataset import Dataset - - PATH = "projects/%s/datasets" % self.PROJECT - RESOURCE = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(RESOURCE) - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - before = Dataset(ds_ref) - - after = client.create_dataset(before, timeout=7.5) - - self.assertEqual(after.dataset_id, self.DS_ID) - self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE["etag"]) - self.assertEqual(after.full_dataset_id, RESOURCE["id"]) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % PATH, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - }, - timeout=7.5, - ) - - def test_create_dataset_w_attrs(self): - from google.cloud.bigquery.dataset import Dataset, AccessEntry - - PATH = "projects/%s/datasets" % self.PROJECT - DESCRIPTION = "DESC" - FRIENDLY_NAME = "FN" - LOCATION = "US" - USER_EMAIL = "phred@example.com" - LABELS = {"color": "red"} - VIEW = { - "projectId": "my-proj", - "datasetId": "starry-skies", - "tableId": "northern-hemisphere", - } - RESOURCE = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "description": DESCRIPTION, - "friendlyName": FRIENDLY_NAME, - "location": LOCATION, - "defaultTableExpirationMs": "3600", - "labels": LABELS, - "access": [{"role": "OWNER", 
"userByEmail": USER_EMAIL}, {"view": VIEW}], - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(RESOURCE) - entries = [ - AccessEntry("OWNER", "userByEmail", USER_EMAIL), - AccessEntry(None, "view", VIEW), - ] - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - before = Dataset(ds_ref) - before.access_entries = entries - before.description = DESCRIPTION - before.friendly_name = FRIENDLY_NAME - before.default_table_expiration_ms = 3600 - before.location = LOCATION - before.labels = LABELS - - after = client.create_dataset(before) - - self.assertEqual(after.dataset_id, self.DS_ID) - self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE["etag"]) - self.assertEqual(after.full_dataset_id, RESOURCE["id"]) - self.assertEqual(after.description, DESCRIPTION) - self.assertEqual(after.friendly_name, FRIENDLY_NAME) - self.assertEqual(after.location, LOCATION) - self.assertEqual(after.default_table_expiration_ms, 3600) - self.assertEqual(after.labels, LABELS) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % PATH, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "description": DESCRIPTION, - "friendlyName": FRIENDLY_NAME, - "location": LOCATION, - "defaultTableExpirationMs": "3600", - "access": [ - {"role": "OWNER", "userByEmail": USER_EMAIL}, - {"view": VIEW}, - ], - "labels": LABELS, - }, - timeout=None, - ) - - def test_create_dataset_w_custom_property(self): - # The library should handle sending properties to the API that are not - # yet part of the library - from google.cloud.bigquery.dataset import Dataset - - path = "/projects/%s/datasets" % self.PROJECT - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "newAlphaProperty": "unreleased property", - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - before = Dataset(ds_ref) - before._properties["newAlphaProperty"] = "unreleased property" - - after = client.create_dataset(before) - - self.assertEqual(after.dataset_id, self.DS_ID) - self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after._properties["newAlphaProperty"], "unreleased property") - - conn.api_request.assert_called_once_with( - method="POST", - path=path, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "newAlphaProperty": "unreleased property", - "labels": {}, - }, - timeout=None, - ) - - def test_create_dataset_w_client_location_wo_dataset_location(self): - from google.cloud.bigquery.dataset import Dataset - - PATH = "projects/%s/datasets" % self.PROJECT - RESOURCE = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "location": self.LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection(RESOURCE) - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - before = Dataset(ds_ref) - - after = client.create_dataset(before) - - self.assertEqual(after.dataset_id, self.DS_ID) - self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE["etag"]) - 
self.assertEqual(after.full_dataset_id, RESOURCE["id"]) - self.assertEqual(after.location, self.LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % PATH, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": self.LOCATION, - }, - timeout=None, - ) - - def test_create_dataset_w_client_location_w_dataset_location(self): - from google.cloud.bigquery.dataset import Dataset - - PATH = "projects/%s/datasets" % self.PROJECT - OTHER_LOCATION = "EU" - RESOURCE = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "location": OTHER_LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection(RESOURCE) - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - before = Dataset(ds_ref) - before.location = OTHER_LOCATION - - after = client.create_dataset(before) - - self.assertEqual(after.dataset_id, self.DS_ID) - self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE["etag"]) - self.assertEqual(after.full_dataset_id, RESOURCE["id"]) - self.assertEqual(after.location, OTHER_LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % PATH, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": OTHER_LOCATION, - }, - timeout=None, - ) - - def test_create_dataset_w_reference(self): - path = "/projects/%s/datasets" % self.PROJECT - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "location": self.LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - dataset = client.create_dataset(DatasetReference(self.PROJECT, self.DS_ID)) - - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource["etag"]) - self.assertEqual(dataset.full_dataset_id, resource["id"]) - self.assertEqual(dataset.location, self.LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path=path, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": self.LOCATION, - }, - timeout=None, - ) - - def test_create_dataset_w_fully_qualified_string(self): - path = "/projects/%s/datasets" % self.PROJECT - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "location": self.LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - dataset = client.create_dataset("{}.{}".format(self.PROJECT, self.DS_ID)) - - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource["etag"]) - self.assertEqual(dataset.full_dataset_id, resource["id"]) - self.assertEqual(dataset.location, self.LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path=path, - data={ - 
"datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": self.LOCATION, - }, - timeout=None, - ) - - def test_create_dataset_w_string(self): - path = "/projects/%s/datasets" % self.PROJECT - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "location": self.LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - dataset = client.create_dataset(self.DS_ID) - - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource["etag"]) - self.assertEqual(dataset.full_dataset_id, resource["id"]) - self.assertEqual(dataset.location, self.LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path=path, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": self.LOCATION, - }, - timeout=None, - ) - - def test_create_dataset_alreadyexists_w_exists_ok_false(self): - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("dataset already exists") - ) - - with pytest.raises(google.api_core.exceptions.AlreadyExists): - client.create_dataset(self.DS_ID) - - def test_create_dataset_alreadyexists_w_exists_ok_true(self): - post_path = "/projects/{}/datasets".format(self.PROJECT) - get_path = "/projects/{}/datasets/{}".format(self.PROJECT, self.DS_ID) - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "id": "{}:{}".format(self.PROJECT, self.DS_ID), - "location": self.LOCATION, - } - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("dataset already exists"), resource - ) - - dataset = client.create_dataset(self.DS_ID, exists_ok=True) - - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource["etag"]) - self.assertEqual(dataset.full_dataset_id, resource["id"]) - self.assertEqual(dataset.location, self.LOCATION) - - conn.api_request.assert_has_calls( - [ - mock.call( - method="POST", - path=post_path, - data={ - "datasetReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - }, - "labels": {}, - "location": self.LOCATION, - }, - timeout=None, - ), - mock.call(method="GET", path=get_path, timeout=None), - ] - ) - - def test_create_routine_w_minimal_resource(self): - from google.cloud.bigquery.routine import Routine - from google.cloud.bigquery.routine import RoutineReference - - creds = _make_credentials() - resource = { - "routineReference": { - "projectId": "test-routine-project", - "datasetId": "test_routines", - "routineId": "minimal_routine", - } - } - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - full_routine_id = "test-routine-project.test_routines.minimal_routine" - routine = Routine(full_routine_id) - - actual_routine = client.create_routine(routine, timeout=7.5) - - 
conn.api_request.assert_called_once_with( - method="POST", - path="/projects/test-routine-project/datasets/test_routines/routines", - data=resource, - timeout=7.5, - ) - self.assertEqual( - actual_routine.reference, RoutineReference.from_string(full_routine_id) - ) - - def test_create_routine_w_conflict(self): - from google.cloud.bigquery.routine import Routine - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("routine already exists") - ) - full_routine_id = "test-routine-project.test_routines.minimal_routine" - routine = Routine(full_routine_id) - - with pytest.raises(google.api_core.exceptions.AlreadyExists): - client.create_routine(routine) - - resource = { - "routineReference": { - "projectId": "test-routine-project", - "datasetId": "test_routines", - "routineId": "minimal_routine", - } - } - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/test-routine-project/datasets/test_routines/routines", - data=resource, - timeout=None, - ) - - def test_create_routine_w_conflict_exists_ok(self): - from google.cloud.bigquery.routine import Routine - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = { - "routineReference": { - "projectId": "test-routine-project", - "datasetId": "test_routines", - "routineId": "minimal_routine", - } - } - conn = client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("routine already exists"), resource - ) - full_routine_id = "test-routine-project.test_routines.minimal_routine" - routine = Routine(full_routine_id) - - actual_routine = client.create_routine(routine, exists_ok=True) - - self.assertEqual(actual_routine.project, "test-routine-project") - self.assertEqual(actual_routine.dataset_id, "test_routines") - self.assertEqual(actual_routine.routine_id, "minimal_routine") - conn.api_request.assert_has_calls( - [ - mock.call( - method="POST", - path="/projects/test-routine-project/datasets/test_routines/routines", - data=resource, - timeout=None, - ), - mock.call( - method="GET", - path="/projects/test-routine-project/datasets/test_routines/routines/minimal_routine", - timeout=None, - ), - ] - ) - - def test_create_table_w_day_partition(self): - from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import TimePartitioning - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table.time_partitioning = TimePartitioning() - - got = client.create_table(table, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "timePartitioning": {"type": "DAY"}, - "labels": {}, - }, - timeout=7.5, - ) - self.assertEqual(table.time_partitioning.type_, "DAY") - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_custom_property(self): - # The library should handle sending properties to the API that are not - # yet part of the library - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - 
client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - resource["newAlphaProperty"] = "unreleased property" - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table._properties["newAlphaProperty"] = "unreleased property" - - got = client.create_table(table) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "newAlphaProperty": "unreleased property", - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got._properties["newAlphaProperty"], "unreleased property") - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_encryption_configuration(self): - from google.cloud.bigquery.encryption_configuration import ( - EncryptionConfiguration, - ) - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table.encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME - ) - - got = client.create_table(table) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - "encryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_day_partition_and_expire(self): - from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import TimePartitioning - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table.time_partitioning = TimePartitioning(expiration_ms=100) - - got = client.create_table(table) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "timePartitioning": {"type": "DAY", "expirationMs": "100"}, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(table.time_partitioning.type_, "DAY") - self.assertEqual(table.time_partitioning.expiration_ms, 100) - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_schema_and_query(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - query = "SELECT * from %s:%s" % (self.DS_ID, self.TABLE_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - resource.update( - { - "schema": { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - }, - "view": {"query": query}, - } - ) - schema = [ - SchemaField("full_name", 
"STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF, schema=schema) - table.view_query = query - - got = client.create_table(table) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "schema": { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - }, - "view": {"query": query, "useLegacySql": False}, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - self.assertEqual(got.project, self.PROJECT) - self.assertEqual(got.dataset_id, self.DS_ID) - self.assertEqual(got.schema, schema) - self.assertEqual(got.view_query, query) - - def test_create_table_w_external(self): - from google.cloud.bigquery.external_config import ExternalConfig - from google.cloud.bigquery.job import SourceFormat - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - resource.update( - { - "externalDataConfiguration": { - "sourceFormat": SourceFormat.CSV, - "autodetect": True, - } - } - ) - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - ec = ExternalConfig("CSV") - ec.autodetect = True - table.external_data_configuration = ec - - got = client.create_table(table) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "externalDataConfiguration": { - "sourceFormat": SourceFormat.CSV, - "autodetect": True, - }, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - self.assertEqual(got.project, self.PROJECT) - self.assertEqual(got.dataset_id, self.DS_ID) - self.assertEqual( - got.external_data_configuration.source_format, SourceFormat.CSV - ) - self.assertEqual(got.external_data_configuration.autodetect, True) - - def test_create_table_w_reference(self): - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - - got = client.create_table(self.TABLE_REF) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_fully_qualified_string(self): - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - - got = client.create_table( - "{}.{}.{}".format(self.PROJECT, self.DS_ID, self.TABLE_ID) - ) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - 
data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_w_string(self): - path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - - got = client.create_table("{}.{}".format(self.DS_ID, self.TABLE_ID)) - - conn.api_request.assert_called_once_with( - method="POST", - path="/%s" % path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - }, - timeout=None, - ) - self.assertEqual(got.table_id, self.TABLE_ID) - - def test_create_table_alreadyexists_w_exists_ok_false(self): - post_path = "/projects/{}/datasets/{}/tables".format(self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("table already exists") - ) - - with pytest.raises(google.api_core.exceptions.AlreadyExists): - client.create_table("{}.{}".format(self.DS_ID, self.TABLE_ID)) - - conn.api_request.assert_called_once_with( - method="POST", - path=post_path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - }, - timeout=None, - ) - - def test_create_table_alreadyexists_w_exists_ok_true(self): - post_path = "/projects/{}/datasets/{}/tables".format(self.PROJECT, self.DS_ID) - get_path = "/projects/{}/datasets/{}/tables/{}".format( - self.PROJECT, self.DS_ID, self.TABLE_ID - ) - resource = self._make_table_resource() - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION - ) - conn = client._connection = make_connection( - google.api_core.exceptions.AlreadyExists("table already exists"), resource - ) - - got = client.create_table( - "{}.{}".format(self.DS_ID, self.TABLE_ID), exists_ok=True - ) - - self.assertEqual(got.project, self.PROJECT) - self.assertEqual(got.dataset_id, self.DS_ID) - self.assertEqual(got.table_id, self.TABLE_ID) - - conn.api_request.assert_has_calls( - [ - mock.call( - method="POST", - path=post_path, - data={ - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "labels": {}, - }, - timeout=None, - ), - mock.call(method="GET", path=get_path, timeout=None), - ] - ) - - def test_close(self): - creds = _make_credentials() - http = mock.Mock() - http._auth_request.session = mock.Mock() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - - client.close() - - http.close.assert_called_once() - http._auth_request.session.close.assert_called_once() - - def test_get_model(self): - path = "projects/%s/datasets/%s/models/%s" % ( - self.PROJECT, - self.DS_ID, - self.MODEL_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - resource = { - "modelReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "modelId": self.MODEL_ID, - } - } - conn = client._connection = make_connection(resource) - - model_ref = DatasetReference(self.PROJECT, 
self.DS_ID).model(self.MODEL_ID) - got = client.get_model(model_ref, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % path, timeout=7.5 - ) - self.assertEqual(got.model_id, self.MODEL_ID) - - def test_get_model_w_string(self): - path = "projects/%s/datasets/%s/models/%s" % ( - self.PROJECT, - self.DS_ID, - self.MODEL_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - resource = { - "modelReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "modelId": self.MODEL_ID, - } - } - conn = client._connection = make_connection(resource) - - model_id = "{}.{}.{}".format(self.PROJECT, self.DS_ID, self.MODEL_ID) - got = client.get_model(model_id) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % path, timeout=None - ) - self.assertEqual(got.model_id, self.MODEL_ID) - - def test_get_routine(self): - from google.cloud.bigquery.routine import Routine - from google.cloud.bigquery.routine import RoutineReference - - full_routine_id = "test-routine-project.test_routines.minimal_routine" - routines = [ - full_routine_id, - Routine(full_routine_id), - RoutineReference.from_string(full_routine_id), - ] - for routine in routines: - creds = _make_credentials() - resource = { - "etag": "im-an-etag", - "routineReference": { - "projectId": "test-routine-project", - "datasetId": "test_routines", - "routineId": "minimal_routine", - }, - "routineType": "SCALAR_FUNCTION", - } - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - - actual_routine = client.get_routine(routine, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/test-routine-project/datasets/test_routines/routines/minimal_routine", - timeout=7.5, - ) - self.assertEqual( - actual_routine.reference, - RoutineReference.from_string(full_routine_id), - msg="routine={}".format(repr(routine)), - ) - self.assertEqual( - actual_routine.etag, - "im-an-etag", - msg="routine={}".format(repr(routine)), - ) - self.assertEqual( - actual_routine.type_, - "SCALAR_FUNCTION", - msg="routine={}".format(repr(routine)), - ) - - def test_get_table(self): - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - resource = self._make_table_resource() - conn = client._connection = make_connection(resource) - table = client.get_table(self.TABLE_REF, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % path, timeout=7.5 - ) - self.assertEqual(table.table_id, self.TABLE_ID) - - def test_get_table_sets_user_agent(self): - creds = _make_credentials() - http = mock.create_autospec(requests.Session) - mock_response = http.request( - url=mock.ANY, method=mock.ANY, headers=mock.ANY, data=mock.ANY - ) - http.reset_mock() - mock_response.status_code = 200 - mock_response.json.return_value = self._make_table_resource() - user_agent_override = client_info.ClientInfo(user_agent="my-application/1.2.3") - client = self._make_one( - project=self.PROJECT, - credentials=creds, - client_info=user_agent_override, - _http=http, - ) - - client.get_table(self.TABLE_REF) - - expected_user_agent = user_agent_override.to_user_agent() - http.request.assert_called_once_with( - url=mock.ANY, - method="GET", - headers={ - 
"X-Goog-API-Client": expected_user_agent, - "Accept-Encoding": "gzip", - "User-Agent": expected_user_agent, - }, - data=mock.ANY, - timeout=None, - ) - self.assertIn("my-application/1.2.3", expected_user_agent) - - def test_update_dataset_w_invalid_field(self): - from google.cloud.bigquery.dataset import Dataset - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(ValueError): - client.update_dataset( - Dataset("{}.{}".format(self.PROJECT, self.DS_ID)), ["foo"] - ) - - def test_update_dataset(self): - from google.cloud.bigquery.dataset import Dataset, AccessEntry - - PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - DESCRIPTION = "DESCRIPTION" - FRIENDLY_NAME = "TITLE" - LOCATION = "loc" - LABELS = {"priority": "high"} - ACCESS = [{"role": "OWNER", "userByEmail": "phred@example.com"}] - EXP = 17 - RESOURCE = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": "etag", - "description": DESCRIPTION, - "friendlyName": FRIENDLY_NAME, - "location": LOCATION, - "defaultTableExpirationMs": EXP, - "labels": LABELS, - "access": ACCESS, - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(RESOURCE, RESOURCE) - ds = Dataset(DatasetReference(self.PROJECT, self.DS_ID)) - ds.description = DESCRIPTION - ds.friendly_name = FRIENDLY_NAME - ds.location = LOCATION - ds.default_table_expiration_ms = EXP - ds.labels = LABELS - ds.access_entries = [AccessEntry("OWNER", "userByEmail", "phred@example.com")] - ds2 = client.update_dataset( - ds, - ["description", "friendly_name", "location", "labels", "access_entries"], - timeout=7.5, - ) - conn.api_request.assert_called_once_with( - method="PATCH", - data={ - "description": DESCRIPTION, - "friendlyName": FRIENDLY_NAME, - "location": LOCATION, - "labels": LABELS, - "access": ACCESS, - }, - path="/" + PATH, - headers=None, - timeout=7.5, - ) - self.assertEqual(ds2.description, ds.description) - self.assertEqual(ds2.friendly_name, ds.friendly_name) - self.assertEqual(ds2.location, ds.location) - self.assertEqual(ds2.labels, ds.labels) - self.assertEqual(ds2.access_entries, ds.access_entries) - - # ETag becomes If-Match header. 
- ds._properties["etag"] = "etag" - client.update_dataset(ds, []) - req = conn.api_request.call_args - self.assertEqual(req[1]["headers"]["If-Match"], "etag") - - def test_update_dataset_w_custom_property(self): - # The library should handle sending properties to the API that are not - # yet part of the library - from google.cloud.bigquery.dataset import Dataset - - path = "/projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - resource = { - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "newAlphaProperty": "unreleased property", - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - dataset = Dataset(DatasetReference(self.PROJECT, self.DS_ID)) - dataset._properties["newAlphaProperty"] = "unreleased property" - - dataset = client.update_dataset(dataset, ["newAlphaProperty"]) - conn.api_request.assert_called_once_with( - method="PATCH", - data={"newAlphaProperty": "unreleased property"}, - path=path, - headers=None, - timeout=None, - ) - - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset._properties["newAlphaProperty"], "unreleased property") - - def test_update_model(self): - from google.cloud.bigquery.model import Model - - path = "projects/%s/datasets/%s/models/%s" % ( - self.PROJECT, - self.DS_ID, - self.MODEL_ID, - ) - description = "description" - title = "title" - expires = datetime.datetime( - 2012, 12, 21, 16, 0, 0, tzinfo=google.cloud._helpers.UTC - ) - resource = { - "modelReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "modelId": self.MODEL_ID, - }, - "description": description, - "etag": "etag", - "expirationTime": str(google.cloud._helpers._millis(expires)), - "friendlyName": title, - "labels": {"x": "y"}, - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource, resource) - model_id = "{}.{}.{}".format(self.PROJECT, self.DS_ID, self.MODEL_ID) - model = Model(model_id) - model.description = description - model.friendly_name = title - model.expires = expires - model.labels = {"x": "y"} - - updated_model = client.update_model( - model, ["description", "friendly_name", "labels", "expires"], timeout=7.5 - ) - - sent = { - "description": description, - "expirationTime": str(google.cloud._helpers._millis(expires)), - "friendlyName": title, - "labels": {"x": "y"}, - } - conn.api_request.assert_called_once_with( - method="PATCH", data=sent, path="/" + path, headers=None, timeout=7.5 - ) - self.assertEqual(updated_model.model_id, model.model_id) - self.assertEqual(updated_model.description, model.description) - self.assertEqual(updated_model.friendly_name, model.friendly_name) - self.assertEqual(updated_model.labels, model.labels) - self.assertEqual(updated_model.expires, model.expires) - - # ETag becomes If-Match header. 
- model._proto.etag = "etag" - client.update_model(model, []) - req = conn.api_request.call_args - self.assertEqual(req[1]["headers"]["If-Match"], "etag") - - def test_update_routine(self): - from google.cloud.bigquery.routine import Routine - from google.cloud.bigquery.routine import RoutineArgument - - full_routine_id = "routines-project.test_routines.updated_routine" - resource = { - "routineReference": { - "projectId": "routines-project", - "datasetId": "test_routines", - "routineId": "updated_routine", - }, - "routineType": "SCALAR_FUNCTION", - "language": "SQL", - "definitionBody": "x * 3", - "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}], - "returnType": None, - "someNewField": "someValue", - } - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource, resource) - routine = Routine(full_routine_id) - routine.arguments = [ - RoutineArgument( - name="x", - data_type=bigquery_v2.types.StandardSqlDataType( - type_kind=bigquery_v2.enums.StandardSqlDataType.TypeKind.INT64 - ), - ) - ] - routine.body = "x * 3" - routine.language = "SQL" - routine.type_ = "SCALAR_FUNCTION" - routine._properties["someNewField"] = "someValue" - - actual_routine = client.update_routine( - routine, - ["arguments", "language", "body", "type_", "return_type", "someNewField"], - timeout=7.5, - ) - - # TODO: routineReference isn't needed when the Routines API supports - # partial updates. - sent = resource - conn.api_request.assert_called_once_with( - method="PUT", - data=sent, - path="/projects/routines-project/datasets/test_routines/routines/updated_routine", - headers=None, - timeout=7.5, - ) - self.assertEqual(actual_routine.arguments, routine.arguments) - self.assertEqual(actual_routine.body, routine.body) - self.assertEqual(actual_routine.language, routine.language) - self.assertEqual(actual_routine.type_, routine.type_) - - # ETag becomes If-Match header. 
- routine._properties["etag"] = "im-an-etag" - client.update_routine(routine, []) - req = conn.api_request.call_args - self.assertEqual(req[1]["headers"]["If-Match"], "im-an-etag") - - def test_update_table(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - description = "description" - title = "title" - resource = self._make_table_resource() - resource.update( - { - "schema": { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - }, - "etag": "etag", - "description": description, - "friendlyName": title, - "labels": {"x": "y"}, - } - ) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource, resource) - table = Table(self.TABLE_REF, schema=schema) - table.description = description - table.friendly_name = title - table.labels = {"x": "y"} - - updated_table = client.update_table( - table, ["schema", "description", "friendly_name", "labels"], timeout=7.5 - ) - - sent = { - "schema": { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - }, - "description": description, - "friendlyName": title, - "labels": {"x": "y"}, - } - conn.api_request.assert_called_once_with( - method="PATCH", data=sent, path="/" + path, headers=None, timeout=7.5 - ) - self.assertEqual(updated_table.description, table.description) - self.assertEqual(updated_table.friendly_name, table.friendly_name) - self.assertEqual(updated_table.schema, table.schema) - self.assertEqual(updated_table.labels, table.labels) - - # ETag becomes If-Match header. 
- table._properties["etag"] = "etag" - client.update_table(table, []) - req = conn.api_request.call_args - self.assertEqual(req[1]["headers"]["If-Match"], "etag") - - def test_update_table_w_custom_property(self): - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - resource = self._make_table_resource() - resource["newAlphaProperty"] = "unreleased property" - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table._properties["newAlphaProperty"] = "unreleased property" - - updated_table = client.update_table(table, ["newAlphaProperty"]) - - conn.api_request.assert_called_once_with( - method="PATCH", - path="/%s" % path, - data={"newAlphaProperty": "unreleased property"}, - headers=None, - timeout=None, - ) - self.assertEqual( - updated_table._properties["newAlphaProperty"], "unreleased property" - ) - - def test_update_table_only_use_legacy_sql(self): - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - resource = self._make_table_resource() - resource["view"] = {"useLegacySql": True} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF) - table.view_use_legacy_sql = True - - updated_table = client.update_table(table, ["view_use_legacy_sql"]) - - conn.api_request.assert_called_once_with( - method="PATCH", - path="/%s" % path, - data={"view": {"useLegacySql": True}}, - headers=None, - timeout=None, - ) - self.assertEqual(updated_table.view_use_legacy_sql, table.view_use_legacy_sql) - - def test_update_table_w_query(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - query = "select fullname, age from person_ages" - location = "EU" - exp_time = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) - schema_resource = { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - } - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - resource = self._make_table_resource() - resource.update( - { - "schema": schema_resource, - "view": {"query": query, "useLegacySql": True}, - "location": location, - "expirationTime": _millis(exp_time), - } - ) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource) - table = Table(self.TABLE_REF, schema=schema) - table.expires = exp_time - table.view_query = query - table.view_use_legacy_sql = True - updated_properties = ["schema", "view_query", "expires", "view_use_legacy_sql"] - - updated_table = client.update_table(table, updated_properties) - - self.assertEqual(updated_table.schema, table.schema) - self.assertEqual(updated_table.view_query, table.view_query) - self.assertEqual(updated_table.expires, table.expires) - 
self.assertEqual(updated_table.view_use_legacy_sql, table.view_use_legacy_sql) - self.assertEqual(updated_table.location, location) - - conn.api_request.assert_called_once_with( - method="PATCH", - path="/%s" % path, - data={ - "view": {"query": query, "useLegacySql": True}, - "expirationTime": str(_millis(exp_time)), - "schema": schema_resource, - }, - headers=None, - timeout=None, - ) - - def test_update_table_w_schema_None(self): - # Simulate deleting schema: not sure if back-end will actually - # allow this operation, but the spec says it is optional. - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - resource1 = self._make_table_resource() - resource1.update( - { - "schema": { - "fields": [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, - ] - } - } - ) - resource2 = self._make_table_resource() - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource1, resource2) - table = client.get_table( - # Test with string for table ID - "{}.{}.{}".format( - self.TABLE_REF.project, - self.TABLE_REF.dataset_id, - self.TABLE_REF.table_id, - ) - ) - table.schema = None - - updated_table = client.update_table(table, ["schema"]) - - self.assertEqual(len(conn.api_request.call_args_list), 2) - req = conn.api_request.call_args_list[1] - self.assertEqual(req[1]["method"], "PATCH") - sent = {"schema": None} - self.assertEqual(req[1]["data"], sent) - self.assertEqual(req[1]["path"], "/%s" % path) - self.assertEqual(len(updated_table.schema), 0) - - def test_update_table_delete_property(self): - from google.cloud.bigquery.table import Table - - description = "description" - title = "title" - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - resource1 = self._make_table_resource() - resource1.update({"description": description, "friendlyName": title}) - resource2 = self._make_table_resource() - resource2["description"] = None - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(resource1, resource2) - table = Table(self.TABLE_REF) - table.description = description - table.friendly_name = title - table2 = client.update_table(table, ["description", "friendly_name"]) - self.assertEqual(table2.description, table.description) - table2.description = None - - table3 = client.update_table(table2, ["description"]) - self.assertEqual(len(conn.api_request.call_args_list), 2) - req = conn.api_request.call_args_list[1] - self.assertEqual(req[1]["method"], "PATCH") - self.assertEqual(req[1]["path"], "/%s" % path) - sent = {"description": None} - self.assertEqual(req[1]["data"], sent) - self.assertIsNone(table3.description) - - def test_list_tables_empty_w_timeout(self): - path = "/projects/{}/datasets/{}/tables".format(self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection({}) - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - iterator = client.list_tables(dataset, timeout=7.5) - self.assertIs(iterator.dataset, dataset) - page = six.next(iterator.pages) - tables = list(page) - token = iterator.next_page_token - - self.assertEqual(tables, []) - self.assertIsNone(token) - conn.api_request.assert_called_once_with( - method="GET", path=path, 
query_params={}, timeout=7.5 - ) - - def test_list_models_empty_w_timeout(self): - path = "/projects/{}/datasets/{}/models".format(self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection({}) - - dataset_id = "{}.{}".format(self.PROJECT, self.DS_ID) - iterator = client.list_models(dataset_id, timeout=7.5) - page = six.next(iterator.pages) - models = list(page) - token = iterator.next_page_token - - self.assertEqual(models, []) - self.assertIsNone(token) - conn.api_request.assert_called_once_with( - method="GET", path=path, query_params={}, timeout=7.5 - ) - - def test_list_models_defaults(self): - from google.cloud.bigquery.model import Model - - MODEL_1 = "model_one" - MODEL_2 = "model_two" - PATH = "projects/%s/datasets/%s/models" % (self.PROJECT, self.DS_ID) - TOKEN = "TOKEN" - DATA = { - "nextPageToken": TOKEN, - "models": [ - { - "modelReference": { - "modelId": MODEL_1, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - } - }, - { - "modelReference": { - "modelId": MODEL_2, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - } - }, - ], - } - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(DATA) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - - iterator = client.list_models(dataset) - self.assertIs(iterator.dataset, dataset) - page = six.next(iterator.pages) - models = list(page) - token = iterator.next_page_token - - self.assertEqual(len(models), len(DATA["models"])) - for found, expected in zip(models, DATA["models"]): - self.assertIsInstance(found, Model) - self.assertEqual(found.model_id, expected["modelReference"]["modelId"]) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % PATH, query_params={}, timeout=None - ) - - def test_list_models_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.list_models(DatasetReference(self.PROJECT, self.DS_ID).model("foo")) - - def test_list_routines_empty_w_timeout(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection({}) - - iterator = client.list_routines("test-routines.test_routines", timeout=7.5) - page = six.next(iterator.pages) - routines = list(page) - token = iterator.next_page_token - - self.assertEqual(routines, []) - self.assertIsNone(token) - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/test-routines/datasets/test_routines/routines", - query_params={}, - timeout=7.5, - ) - - def test_list_routines_defaults(self): - from google.cloud.bigquery.routine import Routine - - project_id = "test-routines" - dataset_id = "test_routines" - path = "/projects/test-routines/datasets/test_routines/routines" - routine_1 = "routine_one" - routine_2 = "routine_two" - token = "TOKEN" - resource = { - "nextPageToken": token, - "routines": [ - { - "routineReference": { - "routineId": routine_1, - "datasetId": dataset_id, - "projectId": project_id, - } - }, - { - "routineReference": { - "routineId": routine_2, - "datasetId": dataset_id, - "projectId": project_id, - } - }, - ], - } - - creds = _make_credentials() - client = self._make_one(project=project_id, credentials=creds) - conn = client._connection = make_connection(resource) - dataset 
= DatasetReference(client.project, dataset_id) - - iterator = client.list_routines(dataset) - self.assertIs(iterator.dataset, dataset) - page = six.next(iterator.pages) - routines = list(page) - actual_token = iterator.next_page_token - - self.assertEqual(len(routines), len(resource["routines"])) - for found, expected in zip(routines, resource["routines"]): - self.assertIsInstance(found, Routine) - self.assertEqual( - found.routine_id, expected["routineReference"]["routineId"] - ) - self.assertEqual(actual_token, token) - - conn.api_request.assert_called_once_with( - method="GET", path=path, query_params={}, timeout=None - ) - - def test_list_routines_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.list_routines( - DatasetReference(self.PROJECT, self.DS_ID).table("foo") - ) - - def test_list_tables_defaults(self): - from google.cloud.bigquery.table import TableListItem - - TABLE_1 = "table_one" - TABLE_2 = "table_two" - PATH = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - TOKEN = "TOKEN" - DATA = { - "nextPageToken": TOKEN, - "tables": [ - { - "kind": "bigquery#table", - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_1), - "tableReference": { - "tableId": TABLE_1, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - }, - "type": "TABLE", - }, - { - "kind": "bigquery#table", - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_2), - "tableReference": { - "tableId": TABLE_2, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - }, - "type": "TABLE", - }, - ], - } - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(DATA) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - - iterator = client.list_tables(dataset) - self.assertIs(iterator.dataset, dataset) - page = six.next(iterator.pages) - tables = list(page) - token = iterator.next_page_token - - self.assertEqual(len(tables), len(DATA["tables"])) - for found, expected in zip(tables, DATA["tables"]): - self.assertIsInstance(found, TableListItem) - self.assertEqual(found.full_table_id, expected["id"]) - self.assertEqual(found.table_type, expected["type"]) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % PATH, query_params={}, timeout=None - ) - - def test_list_tables_explicit(self): - from google.cloud.bigquery.table import TableListItem - - TABLE_1 = "table_one" - TABLE_2 = "table_two" - PATH = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) - TOKEN = "TOKEN" - DATA = { - "tables": [ - { - "kind": "bigquery#table", - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_1), - "tableReference": { - "tableId": TABLE_1, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - }, - "type": "TABLE", - }, - { - "kind": "bigquery#table", - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_2), - "tableReference": { - "tableId": TABLE_2, - "datasetId": self.DS_ID, - "projectId": self.PROJECT, - }, - "type": "TABLE", - }, - ] - } - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(DATA) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - - iterator = client.list_tables( - # Test with string for dataset ID.
- self.DS_ID, - max_results=3, - page_token=TOKEN, - ) - self.assertEqual(iterator.dataset, dataset) - page = six.next(iterator.pages) - tables = list(page) - token = iterator.next_page_token - - self.assertEqual(len(tables), len(DATA["tables"])) - for found, expected in zip(tables, DATA["tables"]): - self.assertIsInstance(found, TableListItem) - self.assertEqual(found.full_table_id, expected["id"]) - self.assertEqual(found.table_type, expected["type"]) - self.assertIsNone(token) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={"maxResults": 3, "pageToken": TOKEN}, - timeout=None, - ) - - def test_list_tables_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.list_tables(DatasetReference(self.PROJECT, self.DS_ID).table("foo")) - - def test_delete_dataset(self): - from google.cloud.bigquery.dataset import Dataset - from google.cloud.bigquery.dataset import DatasetReference - - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - datasets = (ds_ref, Dataset(ds_ref), "{}.{}".format(self.PROJECT, self.DS_ID)) - PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection(*([{}] * len(datasets))) - for arg in datasets: - client.delete_dataset(arg, timeout=7.5) - conn.api_request.assert_called_with( - method="DELETE", path="/%s" % PATH, query_params={}, timeout=7.5 - ) - - def test_delete_dataset_delete_contents(self): - from google.cloud.bigquery.dataset import Dataset - - PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = make_connection({}, {}) - ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - for arg in (ds_ref, Dataset(ds_ref)): - client.delete_dataset(arg, delete_contents=True) - conn.api_request.assert_called_with( - method="DELETE", - path="/%s" % PATH, - query_params={"deleteContents": "true"}, - timeout=None, - ) - - def test_delete_dataset_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.delete_dataset( - DatasetReference(self.PROJECT, self.DS_ID).table("foo") - ) - - def test_delete_dataset_w_not_found_ok_false(self): - path = "/projects/{}/datasets/{}".format(self.PROJECT, self.DS_ID) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("dataset not found") - ) - - with self.assertRaises(google.api_core.exceptions.NotFound): - client.delete_dataset(self.DS_ID) - - conn.api_request.assert_called_with( - method="DELETE", path=path, query_params={}, timeout=None - ) - - def test_delete_dataset_w_not_found_ok_true(self): - path = "/projects/{}/datasets/{}".format(self.PROJECT, self.DS_ID) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("dataset not found") - ) - - client.delete_dataset(self.DS_ID, not_found_ok=True) - - conn.api_request.assert_called_with( - method="DELETE", path=path, query_params={}, timeout=None - ) 
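- 
- # For reference: the user-facing pattern the delete_dataset cases above pin
- # down looks roughly like the sketch below (hypothetical project and dataset
- # IDs). delete_contents=True maps to the deleteContents=true query
- # parameter; not_found_ok=True swallows the 404 instead of raising
- # google.api_core.exceptions.NotFound.
- #
- #     client = bigquery.Client(project="my-project")
- #     client.delete_dataset(
- #         "my-project.my_dataset", delete_contents=True, not_found_ok=True
- #     )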
- - def test_delete_model(self): - from google.cloud.bigquery.model import Model - - path = "projects/%s/datasets/%s/models/%s" % ( - self.PROJECT, - self.DS_ID, - self.MODEL_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - model_id = "{}.{}.{}".format(self.PROJECT, self.DS_ID, self.MODEL_ID) - models = ( - model_id, - DatasetReference(self.PROJECT, self.DS_ID).model(self.MODEL_ID), - Model(model_id), - ) - conn = client._connection = make_connection(*([{}] * len(models))) - - for arg in models: - client.delete_model(arg, timeout=7.5) - conn.api_request.assert_called_with( - method="DELETE", path="/%s" % path, timeout=7.5 - ) - - def test_delete_model_w_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.delete_model(DatasetReference(self.PROJECT, self.DS_ID)) - - def test_delete_model_w_not_found_ok_false(self): - path = "/projects/{}/datasets/{}/models/{}".format( - self.PROJECT, self.DS_ID, self.MODEL_ID - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("model not found") - ) - - with self.assertRaises(google.api_core.exceptions.NotFound): - client.delete_model("{}.{}".format(self.DS_ID, self.MODEL_ID)) - - conn.api_request.assert_called_with(method="DELETE", path=path, timeout=None) - - def test_delete_model_w_not_found_ok_true(self): - path = "/projects/{}/datasets/{}/models/{}".format( - self.PROJECT, self.DS_ID, self.MODEL_ID - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("model not found") - ) - - client.delete_model( - "{}.{}".format(self.DS_ID, self.MODEL_ID), not_found_ok=True - ) - - conn.api_request.assert_called_with(method="DELETE", path=path, timeout=None) - - def test_delete_routine(self): - from google.cloud.bigquery.routine import Routine - from google.cloud.bigquery.routine import RoutineReference - - full_routine_id = "test-routine-project.test_routines.minimal_routine" - routines = [ - full_routine_id, - Routine(full_routine_id), - RoutineReference.from_string(full_routine_id), - ] - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(*([{}] * len(routines))) - - for routine in routines: - client.delete_routine(routine, timeout=7.5) - conn.api_request.assert_called_with( - method="DELETE", - path="/projects/test-routine-project/datasets/test_routines/routines/minimal_routine", - timeout=7.5, - ) - - def test_delete_routine_w_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.delete_routine(DatasetReference(self.PROJECT, self.DS_ID)) - - def test_delete_routine_w_not_found_ok_false(self): - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("routine not found") - ) - - with self.assertRaises(google.api_core.exceptions.NotFound): - 
client.delete_routine("routines-project.test_routines.test_routine") - - conn.api_request.assert_called_with( - method="DELETE", - path="/projects/routines-project/datasets/test_routines/routines/test_routine", - timeout=None, - ) - - def test_delete_routine_w_not_found_ok_true(self): - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("routine not found") - ) - - client.delete_routine( - "routines-project.test_routines.test_routine", not_found_ok=True - ) - - conn.api_request.assert_called_with( - method="DELETE", - path="/projects/routines-project/datasets/test_routines/routines/test_routine", - timeout=None, - ) - - def test_delete_table(self): - from google.cloud.bigquery.table import Table - - tables = ( - self.TABLE_REF, - Table(self.TABLE_REF), - "{}.{}.{}".format( - self.TABLE_REF.project, - self.TABLE_REF.dataset_id, - self.TABLE_REF.table_id, - ), - ) - path = "projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(*([{}] * len(tables))) - - for arg in tables: - client.delete_table(arg, timeout=7.5) - conn.api_request.assert_called_with( - method="DELETE", path="/%s" % path, timeout=7.5 - ) - - def test_delete_table_w_wrong_type(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - with self.assertRaises(TypeError): - client.delete_table(DatasetReference(self.PROJECT, self.DS_ID)) - - def test_delete_table_w_not_found_ok_false(self): - path = "/projects/{}/datasets/{}/tables/{}".format( - self.PROJECT, self.DS_ID, self.TABLE_ID - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("table not found") - ) - - with self.assertRaises(google.api_core.exceptions.NotFound): - client.delete_table("{}.{}".format(self.DS_ID, self.TABLE_ID)) - - conn.api_request.assert_called_with(method="DELETE", path=path, timeout=None) - - def test_delete_table_w_not_found_ok_true(self): - path = "/projects/{}/datasets/{}/tables/{}".format( - self.PROJECT, self.DS_ID, self.TABLE_ID - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection( - google.api_core.exceptions.NotFound("table not found") - ) - - client.delete_table( - "{}.{}".format(self.DS_ID, self.TABLE_ID), not_found_ok=True - ) - - conn.api_request.assert_called_with(method="DELETE", path=path, timeout=None) - - def test_job_from_resource_unknown_type(self): - from google.cloud.bigquery.job import UnknownJob - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - got = client.job_from_resource({}) # Can parse redacted job. 
- self.assertIsInstance(got, UnknownJob) - self.assertEqual(got.project, self.PROJECT) - - def test_get_job_miss_w_explicit_project(self): - from google.cloud.exceptions import NotFound - - OTHER_PROJECT = "OTHER_PROJECT" - JOB_ID = "NONESUCH" - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection() - - with self.assertRaises(NotFound): - client.get_job(JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/OTHER_PROJECT/jobs/NONESUCH", - query_params={"projection": "full", "location": self.LOCATION}, - timeout=None, - ) - - def test_get_job_miss_w_client_location(self): - from google.cloud.exceptions import NotFound - - OTHER_PROJECT = "OTHER_PROJECT" - JOB_ID = "NONESUCH" - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds, location=self.LOCATION) - conn = client._connection = make_connection() - - with self.assertRaises(NotFound): - client.get_job(JOB_ID, project=OTHER_PROJECT) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/OTHER_PROJECT/jobs/NONESUCH", - query_params={"projection": "full", "location": self.LOCATION}, - timeout=None, - ) - - def test_get_job_hit_w_timeout(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import QueryJob - from google.cloud.bigquery.job import WriteDisposition - - JOB_ID = "query_job" - QUERY_DESTINATION_TABLE = "query_destination_table" - QUERY = "SELECT * from test_dataset:test_table" - ASYNC_QUERY_DATA = { - "id": "{}:{}".format(self.PROJECT, JOB_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, - "state": "DONE", - "configuration": { - "query": { - "query": QUERY, - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": QUERY_DESTINATION_TABLE, - }, - "createDisposition": CreateDisposition.CREATE_IF_NEEDED, - "writeDisposition": WriteDisposition.WRITE_TRUNCATE, - } - }, - } - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(ASYNC_QUERY_DATA) - - job = client.get_job(JOB_ID, timeout=7.5) - - self.assertIsInstance(job, QueryJob) - self.assertEqual(job.job_id, JOB_ID) - self.assertEqual(job.create_disposition, CreateDisposition.CREATE_IF_NEEDED) - self.assertEqual(job.write_disposition, WriteDisposition.WRITE_TRUNCATE) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/PROJECT/jobs/query_job", - query_params={"projection": "full"}, - timeout=7.5, - ) - - def test_cancel_job_miss_w_explicit_project(self): - from google.cloud.exceptions import NotFound - - OTHER_PROJECT = "OTHER_PROJECT" - JOB_ID = "NONESUCH" - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection() - - with self.assertRaises(NotFound): - client.cancel_job(JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/OTHER_PROJECT/jobs/NONESUCH/cancel", - query_params={"projection": "full", "location": self.LOCATION}, - timeout=None, - ) - - def test_cancel_job_miss_w_client_location(self): - from google.cloud.exceptions import NotFound - - OTHER_PROJECT = "OTHER_PROJECT" - JOB_ID = "NONESUCH" - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds, location=self.LOCATION) - conn = client._connection = make_connection() - -
with self.assertRaises(NotFound): - client.cancel_job(JOB_ID, project=OTHER_PROJECT) - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/OTHER_PROJECT/jobs/NONESUCH/cancel", - query_params={"projection": "full", "location": self.LOCATION}, - timeout=None, - ) - - def test_cancel_job_hit(self): - from google.cloud.bigquery.job import QueryJob - - JOB_ID = "query_job" - QUERY = "SELECT * from test_dataset:test_table" - QUERY_JOB_RESOURCE = { - "id": "{}:{}".format(self.PROJECT, JOB_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, - "state": "RUNNING", - "configuration": {"query": {"query": QUERY}}, - } - RESOURCE = {"job": QUERY_JOB_RESOURCE} - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(RESOURCE) - - job = client.cancel_job(JOB_ID) - - self.assertIsInstance(job, QueryJob) - self.assertEqual(job.job_id, JOB_ID) - self.assertEqual(job.query, QUERY) - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/PROJECT/jobs/query_job/cancel", - query_params={"projection": "full"}, - timeout=None, - ) - - def test_cancel_job_w_timeout(self): - JOB_ID = "query_job" - QUERY = "SELECT * from test_dataset:test_table" - QUERY_JOB_RESOURCE = { - "id": "{}:{}".format(self.PROJECT, JOB_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, - "state": "RUNNING", - "configuration": {"query": {"query": QUERY}}, - } - RESOURCE = {"job": QUERY_JOB_RESOURCE} - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(RESOURCE) - - client.cancel_job(JOB_ID, timeout=7.5) - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/jobs/query_job/cancel".format(self.PROJECT), - query_params={"projection": "full"}, - timeout=7.5, - ) - - def test_list_jobs_defaults(self): - from google.cloud.bigquery.job import CopyJob - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import ExtractJob - from google.cloud.bigquery.job import LoadJob - from google.cloud.bigquery.job import QueryJob - from google.cloud.bigquery.job import WriteDisposition - - SOURCE_TABLE = "source_table" - DESTINATION_TABLE = "destination_table" - QUERY_DESTINATION_TABLE = "query_destination_table" - SOURCE_URI = "gs://test_bucket/src_object*" - DESTINATION_URI = "gs://test_bucket/dst_object*" - JOB_TYPES = { - "load_job": LoadJob, - "copy_job": CopyJob, - "extract_job": ExtractJob, - "query_job": QueryJob, - } - PATH = "projects/%s/jobs" % self.PROJECT - TOKEN = "TOKEN" - QUERY = "SELECT * from test_dataset:test_table" - ASYNC_QUERY_DATA = { - "id": "%s:%s" % (self.PROJECT, "query_job"), - "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, - "state": "DONE", - "configuration": { - "query": { - "query": QUERY, - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": QUERY_DESTINATION_TABLE, - }, - "createDisposition": CreateDisposition.CREATE_IF_NEEDED, - "writeDisposition": WriteDisposition.WRITE_TRUNCATE, - } - }, - } - EXTRACT_DATA = { - "id": "%s:%s" % (self.PROJECT, "extract_job"), - "jobReference": {"projectId": self.PROJECT, "jobId": "extract_job"}, - "state": "DONE", - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE_TABLE, - }, - "destinationUris": [DESTINATION_URI], - } - }, - } - COPY_DATA = { - "id": "%s:%s" % 
(self.PROJECT, "copy_job"), - "jobReference": {"projectId": self.PROJECT, "jobId": "copy_job"}, - "state": "DONE", - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE_TABLE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": DESTINATION_TABLE, - }, - } - }, - } - LOAD_DATA = { - "id": "%s:%s" % (self.PROJECT, "load_job"), - "jobReference": {"projectId": self.PROJECT, "jobId": "load_job"}, - "state": "DONE", - "configuration": { - "load": { - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE_TABLE, - }, - "sourceUris": [SOURCE_URI], - } - }, - } - DATA = { - "nextPageToken": TOKEN, - "jobs": [ASYNC_QUERY_DATA, EXTRACT_DATA, COPY_DATA, LOAD_DATA], - } - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_jobs() - page = six.next(iterator.pages) - jobs = list(page) - token = iterator.next_page_token - - self.assertEqual(len(jobs), len(DATA["jobs"])) - for found, expected in zip(jobs, DATA["jobs"]): - name = expected["jobReference"]["jobId"] - self.assertIsInstance(found, JOB_TYPES[name]) - self.assertEqual(found.job_id, name) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={"projection": "full"}, - timeout=None, - ) - - def test_list_jobs_load_job_wo_sourceUris(self): - from google.cloud.bigquery.job import LoadJob - - SOURCE_TABLE = "source_table" - JOB_TYPES = {"load_job": LoadJob} - PATH = "projects/%s/jobs" % self.PROJECT - TOKEN = "TOKEN" - LOAD_DATA = { - "id": "%s:%s" % (self.PROJECT, "load_job"), - "jobReference": {"projectId": self.PROJECT, "jobId": "load_job"}, - "state": "DONE", - "configuration": { - "load": { - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE_TABLE, - } - } - }, - } - DATA = {"nextPageToken": TOKEN, "jobs": [LOAD_DATA]} - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_jobs() - page = six.next(iterator.pages) - jobs = list(page) - token = iterator.next_page_token - - self.assertEqual(len(jobs), len(DATA["jobs"])) - for found, expected in zip(jobs, DATA["jobs"]): - name = expected["jobReference"]["jobId"] - self.assertIsInstance(found, JOB_TYPES[name]) - self.assertEqual(found.job_id, name) - self.assertEqual(token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={"projection": "full"}, - timeout=None, - ) - - def test_list_jobs_explicit_missing(self): - PATH = "projects/%s/jobs" % self.PROJECT - DATA = {} - TOKEN = "TOKEN" - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection(DATA) - - iterator = client.list_jobs( - max_results=1000, page_token=TOKEN, all_users=True, state_filter="done" - ) - page = six.next(iterator.pages) - jobs = list(page) - token = iterator.next_page_token - - self.assertEqual(len(jobs), 0) - self.assertIsNone(token) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={ - "projection": "full", - "maxResults": 1000, - "pageToken": TOKEN, - "allUsers": True, - "stateFilter": "done", - }, - timeout=None, - ) - - def test_list_jobs_w_project(self): - creds = 
_make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection({}) - - list(client.list_jobs(project="other-project")) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/other-project/jobs", - query_params={"projection": "full"}, - timeout=None, - ) - - def test_list_jobs_w_timeout(self): - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection({}) - - list(client.list_jobs(timeout=7.5)) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/{}/jobs".format(self.PROJECT), - query_params={"projection": "full"}, - timeout=7.5, - ) - - def test_list_jobs_w_time_filter(self): - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection({}) - - # One millisecond after the unix epoch. - start_time = datetime.datetime(1970, 1, 1, 0, 0, 0, 1000) - # One millisecond after the 2038 31-bit signed int rollover. - end_time = datetime.datetime(2038, 1, 19, 3, 14, 7, 1000) - end_time_millis = (((2 ** 31) - 1) * 1000) + 1 - - list(client.list_jobs(min_creation_time=start_time, max_creation_time=end_time)) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/%s/jobs" % self.PROJECT, - query_params={ - "projection": "full", - "minCreationTime": "1", - "maxCreationTime": str(end_time_millis), - }, - timeout=None, - ) - - def test_list_jobs_w_parent_job_filter(self): - from google.cloud.bigquery import job - - creds = _make_credentials() - client = self._make_one(self.PROJECT, creds) - conn = client._connection = make_connection({}, {}) - - parent_job_args = ["parent-job-123", job._AsyncJob("parent-job-123", client)] - - for parent_job in parent_job_args: - list(client.list_jobs(parent_job=parent_job)) - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/%s/jobs" % self.PROJECT, - query_params={"projection": "full", "parentJobId": "parent-job-123"}, - timeout=None, - ) - conn.api_request.reset_mock() - - def test_load_table_from_uri(self): - from google.cloud.bigquery.job import LoadJob, LoadJobConfig - - JOB = "job_name" - DESTINATION = "destination_table" - SOURCE_URI = "http://example.com/source.csv" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "load": { - "sourceUris": [SOURCE_URI], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": DESTINATION, - }, - } - }, - } - creds = _make_credentials() - http = object() - job_config = LoadJobConfig() - original_config_copy = copy.deepcopy(job_config) - - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - destination = DatasetReference(self.PROJECT, self.DS_ID).table(DESTINATION) - - job = client.load_table_from_uri( - SOURCE_URI, destination, job_id=JOB, job_config=job_config, timeout=7.5 - ) - - # Check that load_table_from_uri actually starts the job.
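- # (The client assembles the whole job resource locally and starts it with
- # a single POST to the project's /jobs collection.)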
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/%s/jobs" % self.PROJECT, - data=RESOURCE, - timeout=7.5, - ) - - # the original config object should not have been modified - self.assertEqual(job_config.to_api_repr(), original_config_copy.to_api_repr()) - - self.assertIsInstance(job, LoadJob) - self.assertIsInstance(job._configuration, LoadJobConfig) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(list(job.source_uris), [SOURCE_URI]) - self.assertIs(job.destination, destination) - - conn = client._connection = make_connection(RESOURCE) - - job = client.load_table_from_uri([SOURCE_URI], destination, job_id=JOB) - self.assertIsInstance(job, LoadJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(list(job.source_uris), [SOURCE_URI]) - self.assertIs(job.destination, destination) - - def test_load_table_from_uri_w_explicit_project(self): - job_id = "this-is-a-job-id" - destination_id = "destination_table" - source_uri = "gs://example/source.csv" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "load": { - "sourceUris": [source_uri], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": destination_id, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - destination = DatasetReference(self.PROJECT, self.DS_ID).table(destination_id) - - client.load_table_from_uri( - source_uri, - destination, - job_id=job_id, - project="other-project", - location=self.LOCATION, - ) - - # Check that load_table_from_uri actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_load_table_from_uri_w_client_location(self): - job_id = "this-is-a-job-id" - destination_id = "destination_table" - source_uri = "gs://example/source.csv" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "load": { - "sourceUris": [source_uri], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": destination_id, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - client.load_table_from_uri( - source_uri, - # Test with string for table ID. - "{}.{}".format(self.DS_ID, destination_id), - job_id=job_id, - project="other-project", - ) - - # Check that load_table_from_uri actually starts the job. 
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_load_table_from_uri_w_invalid_job_config(self): - from google.cloud.bigquery import job - - JOB = "job_name" - DESTINATION = "destination_table" - SOURCE_URI = "http://example.com/source.csv" - - creds = _make_credentials() - http = object() - job_config = job.CopyJobConfig() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - destination = DatasetReference(self.PROJECT, self.DS_ID).table(DESTINATION) - - with self.assertRaises(TypeError) as exc: - client.load_table_from_uri( - SOURCE_URI, destination, job_id=JOB, job_config=job_config - ) - - self.assertIn("Expected an instance of LoadJobConfig", exc.exception.args[0]) - - @staticmethod - def _mock_requests_response(status_code, headers, content=b""): - return mock.Mock( - content=content, - headers=headers, - status_code=status_code, - spec=["content", "headers", "status_code"], - ) - - def _mock_transport(self, status_code, headers, content=b""): - fake_transport = mock.Mock(spec=["request"]) - fake_response = self._mock_requests_response( - status_code, headers, content=content - ) - fake_transport.request.return_value = fake_response - return fake_transport - - def _initiate_resumable_upload_helper(self, num_retries=None): - from google.resumable_media.requests import ResumableUpload - from google.cloud.bigquery.client import _DEFAULT_CHUNKSIZE - from google.cloud.bigquery.client import _GENERIC_CONTENT_TYPE - from google.cloud.bigquery.client import _get_upload_headers - from google.cloud.bigquery.job import LoadJob - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.job import SourceFormat - - # Create mocks to be checked for doing transport. - resumable_url = "http://test.invalid?upload_id=hey-you" - response_headers = {"location": resumable_url} - fake_transport = self._mock_transport(http_client.OK, response_headers) - client = self._make_one(project=self.PROJECT, _http=fake_transport) - conn = client._connection = make_connection() - - # Create some mock arguments and call the method under test. - data = b"goodbye gudbi gootbee" - stream = io.BytesIO(data) - config = LoadJobConfig() - config.source_format = SourceFormat.CSV - job = LoadJob(None, None, self.TABLE_REF, client, job_config=config) - metadata = job.to_api_repr() - upload, transport = client._initiate_resumable_upload( - stream, metadata, num_retries - ) - - # Check the returned values. 
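- # (The initial POST carries only the job metadata; the "location" header
- # of the response is the resumable session URL that the stream's chunks
- # are uploaded to afterwards.)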
- self.assertIsInstance(upload, ResumableUpload) - upload_url = ( - "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" - + self.PROJECT - + "/jobs?uploadType=resumable" - ) - self.assertEqual(upload.upload_url, upload_url) - expected_headers = _get_upload_headers(conn.user_agent) - self.assertEqual(upload._headers, expected_headers) - self.assertFalse(upload.finished) - self.assertEqual(upload._chunk_size, _DEFAULT_CHUNKSIZE) - self.assertIs(upload._stream, stream) - self.assertIsNone(upload._total_bytes) - self.assertEqual(upload._content_type, _GENERIC_CONTENT_TYPE) - self.assertEqual(upload.resumable_url, resumable_url) - - retry_strategy = upload._retry_strategy - self.assertEqual(retry_strategy.max_sleep, 64.0) - if num_retries is None: - self.assertEqual(retry_strategy.max_cumulative_retry, 600.0) - self.assertIsNone(retry_strategy.max_retries) - else: - self.assertIsNone(retry_strategy.max_cumulative_retry) - self.assertEqual(retry_strategy.max_retries, num_retries) - self.assertIs(transport, fake_transport) - # Make sure we never read from the stream. - self.assertEqual(stream.tell(), 0) - - # Check the mocks. - request_headers = expected_headers.copy() - request_headers["x-upload-content-type"] = _GENERIC_CONTENT_TYPE - fake_transport.request.assert_called_once_with( - "POST", - upload_url, - data=json.dumps(metadata).encode("utf-8"), - headers=request_headers, - timeout=mock.ANY, - ) - - def test__initiate_resumable_upload(self): - self._initiate_resumable_upload_helper() - - def test__initiate_resumable_upload_with_retry(self): - self._initiate_resumable_upload_helper(num_retries=11) - - def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): - from google.cloud.bigquery.client import _get_upload_headers - from google.cloud.bigquery.job import LoadJob - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.job import SourceFormat - - fake_transport = self._mock_transport(http_client.OK, {}) - client = self._make_one(project=self.PROJECT, _http=fake_transport) - conn = client._connection = make_connection() - - # Create some mock arguments. - data = b"Bzzzz-zap \x00\x01\xf4" - stream = io.BytesIO(data) - config = LoadJobConfig() - config.source_format = SourceFormat.CSV - job = LoadJob(None, None, self.TABLE_REF, client, job_config=config) - metadata = job.to_api_repr() - size = len(data) - response = client._do_multipart_upload(stream, metadata, size, num_retries) - - # Check the mocks and the returned value. 
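- # (A multipart upload sends metadata and data in one request: a JSON part
- # and a raw-bytes part separated by the mocked "==0==" boundary, as the
- # payload assertion below spells out.)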
- self.assertIs(response, fake_transport.request.return_value) - self.assertEqual(stream.tell(), size) - get_boundary.assert_called_once_with() - - upload_url = ( - "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" - + self.PROJECT - + "/jobs?uploadType=multipart" - ) - payload = ( - b"--==0==\r\n" - + b"content-type: application/json; charset=UTF-8\r\n\r\n" - + json.dumps(metadata).encode("utf-8") - + b"\r\n" - + b"--==0==\r\n" - + b"content-type: */*\r\n\r\n" - + data - + b"\r\n" - + b"--==0==--" - ) - headers = _get_upload_headers(conn.user_agent) - headers["content-type"] = b'multipart/related; boundary="==0=="' - fake_transport.request.assert_called_once_with( - "POST", upload_url, data=payload, headers=headers, timeout=mock.ANY - ) - - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") - def test__do_multipart_upload(self, get_boundary): - self._do_multipart_upload_success_helper(get_boundary) - - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") - def test__do_multipart_upload_with_retry(self, get_boundary): - self._do_multipart_upload_success_helper(get_boundary, num_retries=8) - - def test_copy_table(self): - from google.cloud.bigquery.job import CopyJob - - JOB = "job_name" - SOURCE = "source_table" - DESTINATION = "destination_table" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": DESTINATION, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - destination = dataset.table(DESTINATION) - - job = client.copy_table(source, destination, job_id=JOB, timeout=7.5) - - # Check that copy_table actually starts the job. 
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/%s/jobs" % self.PROJECT, - data=RESOURCE, - timeout=7.5, - ) - - self.assertIsInstance(job, CopyJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(list(job.sources), [source]) - self.assertIs(job.destination, destination) - - conn = client._connection = make_connection(RESOURCE) - source2 = dataset.table(SOURCE + "2") - job = client.copy_table([source, source2], destination, job_id=JOB) - self.assertIsInstance(job, CopyJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(list(job.sources), [source, source2]) - self.assertIs(job.destination, destination) - - def test_copy_table_w_explicit_project(self): - job_id = "this-is-a-job-id" - source_id = "source_table" - destination_id = "destination_table" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": source_id, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": destination_id, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(source_id) - destination = dataset.table(destination_id) - - client.copy_table( - source, - destination, - job_id=job_id, - project="other-project", - location=self.LOCATION, - ) - - # Check that copy_table actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_copy_table_w_client_location(self): - job_id = "this-is-a-job-id" - source_id = "source_table" - destination_id = "destination_table" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": source_id, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": destination_id, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - client.copy_table( - # Test with string for table IDs. - "{}.{}".format(self.DS_ID, source_id), - "{}.{}".format(self.DS_ID, destination_id), - job_id=job_id, - project="other-project", - ) - - # Check that copy_table actually starts the job. 
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_copy_table_w_source_strings(self): - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection({}) - sources = [ - "dataset_wo_proj.some_table", - "other_project.other_dataset.other_table", - DatasetReference(client.project, "dataset_from_ref").table( - "table_from_ref" - ), - ] - destination = "some_project.some_dataset.destination_table" - - job = client.copy_table(sources, destination) - - expected_sources = [ - DatasetReference(client.project, "dataset_wo_proj").table("some_table"), - DatasetReference("other_project", "other_dataset").table("other_table"), - DatasetReference(client.project, "dataset_from_ref").table( - "table_from_ref" - ), - ] - self.assertEqual(list(job.sources), expected_sources) - expected_destination = DatasetReference("some_project", "some_dataset").table( - "destination_table" - ) - self.assertEqual(job.destination, expected_destination) - - def test_copy_table_w_invalid_job_config(self): - from google.cloud.bigquery import job - - JOB = "job_name" - SOURCE = "source_table" - DESTINATION = "destination_table" - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - job_config = job.ExtractJobConfig() - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - destination = dataset.table(DESTINATION) - with self.assertRaises(TypeError) as exc: - client.copy_table(source, destination, job_id=JOB, job_config=job_config) - - self.assertIn("Expected an instance of CopyJobConfig", exc.exception.args[0]) - - def test_copy_table_w_valid_job_config(self): - from google.cloud.bigquery.job import CopyJobConfig - - JOB = "job_name" - SOURCE = "source_table" - DESTINATION = "destination_table" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": DESTINATION, - }, - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - destination = dataset.table(DESTINATION) - - job_config = CopyJobConfig() - original_config_copy = copy.deepcopy(job_config) - job = client.copy_table(source, destination, job_id=JOB, job_config=job_config) - - # Check that copy_table actually starts the job. 
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/%s/jobs" % self.PROJECT, - data=RESOURCE, - timeout=None, - ) - self.assertIsInstance(job._configuration, CopyJobConfig) - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_extract_table(self): - from google.cloud.bigquery.job import ExtractJob - - JOB = "job_id" - SOURCE = "source_table" - DESTINATION = "gs://bucket_name/object_name" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE, - }, - "destinationUris": [DESTINATION], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - - job = client.extract_table(source, DESTINATION, job_id=JOB, timeout=7.5) - - # Check that extract_table actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=RESOURCE, timeout=7.5, - ) - - # Check the job resource. - self.assertIsInstance(job, ExtractJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(job.source, source) - self.assertEqual(list(job.destination_uris), [DESTINATION]) - - def test_extract_table_w_invalid_job_config(self): - from google.cloud.bigquery import job - - JOB = "job_id" - SOURCE = "source_table" - DESTINATION = "gs://bucket_name/object_name" - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - job_config = job.LoadJobConfig() - with self.assertRaises(TypeError) as exc: - client.extract_table(source, DESTINATION, job_id=JOB, job_config=job_config) - - self.assertIn("Expected an instance of ExtractJobConfig", exc.exception.args[0]) - - def test_extract_table_w_explicit_project(self): - job_id = "job_id" - source_id = "source_table" - destination = "gs://bucket_name/object_name" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": source_id, - }, - "destinationUris": [destination], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(source_id) - - client.extract_table( - source, - destination, - job_id=job_id, - project="other-project", - location=self.LOCATION, - ) - - # Check that extract_table actually starts the job. 
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_extract_table_w_client_location(self): - job_id = "job_id" - source_id = "source_table" - destination = "gs://bucket_name/object_name" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": source_id, - }, - "destinationUris": [destination], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - client.extract_table( - # Test with string for table ID. - "{}.{}".format(self.DS_ID, source_id), - destination, - job_id=job_id, - project="other-project", - ) - - # Check that extract_table actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_extract_table_generated_job_id(self): - from google.cloud.bigquery.job import ExtractJob - from google.cloud.bigquery.job import ExtractJobConfig - from google.cloud.bigquery.job import DestinationFormat - - JOB = "job_id" - SOURCE = "source_table" - DESTINATION = "gs://bucket_name/object_name" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE, - }, - "destinationUris": [DESTINATION], - "destinationFormat": "NEWLINE_DELIMITED_JSON", - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - job_config = ExtractJobConfig() - job_config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON - original_config_copy = copy.deepcopy(job_config) - - job = client.extract_table(source, DESTINATION, job_config=job_config) - - # Check that extract_table actually starts the job. - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/projects/PROJECT/jobs") - self.assertIsInstance(req["data"]["jobReference"]["jobId"], six.string_types) - self.assertIsNone(req["timeout"]) - - # Check the job resource. 
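The extract_table tests in this stretch pass an ExtractJobConfig and rely on the client generating a job ID when none is supplied. A minimal sketch of that call path, assuming a reachable Cloud Storage bucket; the table and bucket names are hypothetical:

    from google.cloud import bigquery

    client = bigquery.Client()

    job_config = bigquery.ExtractJobConfig()
    job_config.destination_format = bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON

    extract_job = client.extract_table(
        "my_dataset.source_table",
        ["gs://my-bucket/shard-*.json"],  # one or more destination URIs
        job_config=job_config,
    )
    extract_job.result()  # wait for the export to finish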
- self.assertIsInstance(job, ExtractJob) - self.assertIs(job._client, client) - self.assertEqual(job.source, source) - self.assertEqual(list(job.destination_uris), [DESTINATION]) - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_extract_table_w_destination_uris(self): - from google.cloud.bigquery.job import ExtractJob - - JOB = "job_id" - SOURCE = "source_table" - DESTINATION1 = "gs://bucket_name/object_one" - DESTINATION2 = "gs://bucket_name/object_two" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": SOURCE, - }, - "destinationUris": [DESTINATION1, DESTINATION2], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = dataset.table(SOURCE) - - job = client.extract_table(source, [DESTINATION1, DESTINATION2], job_id=JOB) - - # Check that extract_table actually starts the job. - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/projects/PROJECT/jobs") - self.assertIsNone(req["timeout"]) - - # Check the job resource. - self.assertIsInstance(job, ExtractJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(job.source, source) - self.assertEqual(list(job.destination_uris), [DESTINATION1, DESTINATION2]) - - def test_query_defaults(self): - from google.cloud.bigquery.job import QueryJob - - QUERY = "select count(*) from persons" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": "some-random-id"}, - "configuration": {"query": {"query": QUERY, "useLegacySql": False}}, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - - job = client.query(QUERY) - - self.assertIsInstance(job, QueryJob) - self.assertIsInstance(job.job_id, six.string_types) - self.assertIs(job._client, client) - self.assertEqual(job.query, QUERY) - self.assertEqual(job.udf_resources, []) - self.assertEqual(job.query_parameters, []) - - # Check that query actually starts the job. - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/projects/PROJECT/jobs") - self.assertIsNone(req["timeout"]) - sent = req["data"] - self.assertIsInstance(sent["jobReference"]["jobId"], six.string_types) - sent_config = sent["configuration"]["query"] - self.assertEqual(sent_config["query"], QUERY) - self.assertFalse(sent_config["useLegacySql"]) - - def test_query_w_explicit_timeout(self): - query = "select count(*) from persons" - resource = { - "jobReference": {"projectId": self.PROJECT, "jobId": mock.ANY}, - "configuration": {"query": {"query": query, "useLegacySql": False}}, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - - client.query(query, timeout=7.5) - - # Check that query actually starts the job. 
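test_query_defaults (above) fixes the client-side defaults: useLegacySql is False and a random UUID is used for the job ID when the caller does not supply one. Roughly, from the caller's side, with a hypothetical dataset:

    from google.cloud import bigquery

    client = bigquery.Client()

    query_job = client.query("SELECT COUNT(*) FROM `my_dataset.persons`")
    print(query_job.job_id)  # an auto-generated UUID string
    rows = list(query_job.result())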
- conn.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/jobs".format(self.PROJECT), - data=resource, - timeout=7.5, - ) - - def test_query_w_explicit_project(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": {"query": {"query": query, "useLegacySql": False}}, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - - client.query( - query, job_id=job_id, project="other-project", location=self.LOCATION - ) - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_query_w_explicit_job_config(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "jobId": job_id, - "projectId": self.PROJECT, - "location": self.LOCATION, - }, - "configuration": { - "query": { - "query": query, - "defaultDataset": { - "projectId": self.PROJECT, - "datasetId": "some-dataset", - }, - "useLegacySql": False, - "useQueryCache": True, - "maximumBytesBilled": "2000", - } - }, - } - - creds = _make_credentials() - http = object() - - from google.cloud.bigquery import QueryJobConfig, DatasetReference - - default_job_config = QueryJobConfig() - default_job_config.default_dataset = DatasetReference( - self.PROJECT, "some-dataset" - ) - default_job_config.maximum_bytes_billed = 1000 - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - conn = client._connection = make_connection(resource) - - job_config = QueryJobConfig() - job_config.use_query_cache = True - job_config.maximum_bytes_billed = 2000 - original_config_copy = copy.deepcopy(job_config) - - client.query( - query, job_id=job_id, location=self.LOCATION, job_config=job_config - ) - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=resource, timeout=None - ) - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_query_preserving_explicit_job_config(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "jobId": job_id, - "projectId": self.PROJECT, - "location": self.LOCATION, - }, - "configuration": { - "query": { - "query": query, - "useLegacySql": False, - "useQueryCache": True, - "maximumBytesBilled": "2000", - } - }, - } - - creds = _make_credentials() - http = object() - - from google.cloud.bigquery import QueryJobConfig - - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http,) - conn = client._connection = make_connection(resource) - - job_config = QueryJobConfig() - job_config.use_query_cache = True - job_config.maximum_bytes_billed = 2000 - original_config_copy = copy.deepcopy(job_config) - - client.query( - query, job_id=job_id, location=self.LOCATION, job_config=job_config - ) - - # Check that query actually starts the job. 
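The job-config merging exercised above can be summarized as: fields set on the per-call QueryJobConfig win, anything left unset falls back to the client's default_query_job_config, and neither config object is mutated in the process. A sketch under those assumptions; the project and dataset names are hypothetical:

    from google.cloud import bigquery
    from google.cloud.bigquery import DatasetReference, QueryJobConfig

    default_config = QueryJobConfig()
    default_config.default_dataset = DatasetReference("my-project", "some_dataset")
    default_config.maximum_bytes_billed = 1000

    client = bigquery.Client(default_query_job_config=default_config)

    per_call_config = QueryJobConfig()
    per_call_config.maximum_bytes_billed = 2000  # overrides the default's 1000

    # default_dataset is inherited from default_config; the 2000-byte cap wins.
    job = client.query("SELECT 1", job_config=per_call_config)

Explicitly setting a field to None on the per-call config clears the corresponding client-level default rather than inheriting it, which test_query_w_explicit_job_config_override below verifies.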
- conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=resource, timeout=None - ) - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_query_preserving_explicit_default_job_config(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "jobId": job_id, - "projectId": self.PROJECT, - "location": self.LOCATION, - }, - "configuration": { - "query": { - "query": query, - "defaultDataset": { - "projectId": self.PROJECT, - "datasetId": "some-dataset", - }, - "useLegacySql": False, - "maximumBytesBilled": "1000", - } - }, - } - - creds = _make_credentials() - http = object() - - from google.cloud.bigquery import QueryJobConfig, DatasetReference - - default_job_config = QueryJobConfig() - default_job_config.default_dataset = DatasetReference( - self.PROJECT, "some-dataset" - ) - default_job_config.maximum_bytes_billed = 1000 - default_config_copy = copy.deepcopy(default_job_config) - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - conn = client._connection = make_connection(resource) - - client.query(query, job_id=job_id, location=self.LOCATION, job_config=None) - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=resource, timeout=None - ) - - # the original default config object should not have been modified - assert default_job_config.to_api_repr() == default_config_copy.to_api_repr() - - def test_query_w_invalid_job_config(self): - from google.cloud.bigquery import QueryJobConfig, DatasetReference - from google.cloud.bigquery import job - - job_id = "some-job-id" - query = "select count(*) from persons" - creds = _make_credentials() - http = object() - default_job_config = QueryJobConfig() - default_job_config.default_dataset = DatasetReference( - self.PROJECT, "some-dataset" - ) - default_job_config.maximum_bytes_billed = 1000 - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - - job_config = job.LoadJobConfig() - - with self.assertRaises(TypeError) as exc: - client.query( - query, job_id=job_id, location=self.LOCATION, job_config=job_config - ) - self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) - - def test_query_w_explicit_job_config_override(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "jobId": job_id, - "projectId": self.PROJECT, - "location": self.LOCATION, - }, - "configuration": { - "query": { - "query": query, - "defaultDataset": None, - "useLegacySql": False, - "useQueryCache": True, - "maximumBytesBilled": "2000", - } - }, - } - - creds = _make_credentials() - http = object() - - from google.cloud.bigquery import QueryJobConfig, DatasetReference - - default_job_config = QueryJobConfig() - default_job_config.default_dataset = DatasetReference( - self.PROJECT, "some-dataset" - ) - default_job_config.maximum_bytes_billed = 1000 - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - conn = client._connection = make_connection(resource) - - job_config = QueryJobConfig() - job_config.use_query_cache = True - job_config.maximum_bytes_billed = 2000 - 
job_config.default_dataset = None - - client.query( - query, job_id=job_id, location=self.LOCATION, job_config=job_config - ) - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=resource, timeout=None - ) - - def test_query_w_client_default_config_no_incoming(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "jobId": job_id, - "projectId": self.PROJECT, - "location": self.LOCATION, - }, - "configuration": { - "query": { - "query": query, - "useLegacySql": False, - "maximumBytesBilled": "1000", - } - }, - } - - creds = _make_credentials() - http = object() - - from google.cloud.bigquery import QueryJobConfig - - default_job_config = QueryJobConfig() - default_job_config.maximum_bytes_billed = 1000 - - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - conn = client._connection = make_connection(resource) - - client.query(query, job_id=job_id, location=self.LOCATION) - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", path="/projects/PROJECT/jobs", data=resource, timeout=None - ) - - def test_query_w_invalid_default_job_config(self): - job_id = "some-job-id" - query = "select count(*) from persons" - creds = _make_credentials() - http = object() - default_job_config = object() - client = self._make_one( - project=self.PROJECT, - credentials=creds, - _http=http, - default_query_job_config=default_job_config, - ) - - with self.assertRaises(TypeError) as exc: - client.query(query, job_id=job_id, location=self.LOCATION) - self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) - - def test_query_w_client_location(self): - job_id = "some-job-id" - query = "select count(*) from persons" - resource = { - "jobReference": { - "projectId": "other-project", - "location": self.LOCATION, - "jobId": job_id, - }, - "configuration": {"query": {"query": query, "useLegacySql": False}}, - } - creds = _make_credentials() - http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION - ) - conn = client._connection = make_connection(resource) - - client.query(query, job_id=job_id, project="other-project") - - # Check that query actually starts the job. - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/other-project/jobs", - data=resource, - timeout=None, - ) - - def test_query_detect_location(self): - query = "select count(*) from persons" - resource_location = "EU" - resource = { - "jobReference": { - "projectId": self.PROJECT, - # Location not set in request, but present in the response. - "location": resource_location, - "jobId": "some-random-id", - }, - "configuration": {"query": {"query": query, "useLegacySql": False}}, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(resource) - - job = client.query(query) - - self.assertEqual(job.location, resource_location) - - # Check that request did not contain a location. 
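test_query_w_client_location and test_query_detect_location (above) cover the two location behaviors: a location configured on the client is attached to outgoing jobs, and when the request carries no location the job picks one up from the server's response. In caller terms, a brief sketch with a hypothetical region:

    from google.cloud import bigquery

    client = bigquery.Client(location="US")

    job = client.query("SELECT 1")
    print(job.location)  # "US" from the client, or whatever the API reports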
- conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - sent = req["data"] - self.assertIsNone(sent["jobReference"].get("location")) - - def test_query_w_udf_resources(self): - from google.cloud.bigquery.job import QueryJob - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import UDFResource - - RESOURCE_URI = "gs://some-bucket/js/lib.js" - JOB = "job_name" - QUERY = "select count(*) from persons" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "query": { - "query": QUERY, - "useLegacySql": True, - "userDefinedFunctionResources": [{"resourceUri": RESOURCE_URI}], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - udf_resources = [UDFResource("resourceUri", RESOURCE_URI)] - config = QueryJobConfig() - config.udf_resources = udf_resources - config.use_legacy_sql = True - - job = client.query(QUERY, job_config=config, job_id=JOB) - - self.assertIsInstance(job, QueryJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(job.query, QUERY) - self.assertEqual(job.udf_resources, udf_resources) - self.assertEqual(job.query_parameters, []) - - # Check that query actually starts the job. - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/projects/PROJECT/jobs") - self.assertIsNone(req["timeout"]) - sent = req["data"] - self.assertIsInstance(sent["jobReference"]["jobId"], six.string_types) - sent_config = sent["configuration"]["query"] - self.assertEqual(sent_config["query"], QUERY) - self.assertTrue(sent_config["useLegacySql"]) - self.assertEqual( - sent_config["userDefinedFunctionResources"][0], - {"resourceUri": RESOURCE_URI}, - ) - - def test_query_w_query_parameters(self): - from google.cloud.bigquery.job import QueryJob - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import ScalarQueryParameter - - JOB = "job_name" - QUERY = "select count(*) from persons" - RESOURCE = { - "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, - "configuration": { - "query": { - "query": QUERY, - "useLegacySql": False, - "queryParameters": [ - { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - ], - } - }, - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESOURCE) - query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] - config = QueryJobConfig() - config.query_parameters = query_parameters - - job = client.query(QUERY, job_config=config, job_id=JOB) - - self.assertIsInstance(job, QueryJob) - self.assertIs(job._client, client) - self.assertEqual(job.job_id, JOB) - self.assertEqual(job.query, QUERY) - self.assertEqual(job.udf_resources, []) - self.assertEqual(job.query_parameters, query_parameters) - - # Check that query actually starts the job. 
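test_query_w_query_parameters (above) drives the parameterized-query path; its wire-level assertions follow below. At the API level that corresponds to code like the following, with a hypothetical table:

    from google.cloud import bigquery
    from google.cloud.bigquery import QueryJobConfig, ScalarQueryParameter

    client = bigquery.Client()

    config = QueryJobConfig()
    config.query_parameters = [ScalarQueryParameter("foo", "INT64", 123)]

    job = client.query(
        "SELECT COUNT(*) FROM `my_dataset.persons` WHERE age > @foo",
        job_config=config,
    )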
- conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/projects/PROJECT/jobs") - self.assertIsNone(req["timeout"]) - sent = req["data"] - self.assertEqual(sent["jobReference"]["jobId"], JOB) - sent_config = sent["configuration"]["query"] - self.assertEqual(sent_config["query"], QUERY) - self.assertFalse(sent_config["useLegacySql"]) - self.assertEqual( - sent_config["queryParameters"][0], - { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - }, - ) - - def test_insert_rows_w_timeout(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - table = Table(self.TABLE_REF) - - ROWS = [ - ("Phred Phlyntstone", 32), - ("Bharney Rhubble", 33), - ] - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - client.insert_rows(table, ROWS, selected_fields=schema, timeout=7.5) - - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req.get("timeout"), 7.5) - - def test_insert_rows_wo_schema(self): - from google.cloud.bigquery.table import Table - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - table = Table(self.TABLE_REF) - ROWS = [ - ("Phred Phlyntstone", 32), - ("Bharney Rhubble", 33), - ("Wylma Phlyntstone", 29), - ("Bhettye Rhubble", 27), - ] - - with self.assertRaises(ValueError) as exc: - client.insert_rows(table, ROWS) - - self.assertIn("Could not determine schema for table", exc.exception.args[0]) - - def test_insert_rows_w_schema(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import _microseconds_from_datetime - from google.cloud.bigquery.schema import SchemaField - - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), - ] - ROWS = [ - ("Phred Phlyntstone", 32, _datetime_to_rfc3339(WHEN)), - ("Bharney Rhubble", 33, WHEN + datetime.timedelta(seconds=1)), - ("Wylma Phlyntstone", 29, WHEN + datetime.timedelta(seconds=2)), - ("Bhettye Rhubble", 27, None), - ] - - def _row_data(row): - result = {"full_name": row[0], "age": str(row[1])} - joined = row[2] - if isinstance(joined, datetime.datetime): - joined = _microseconds_from_datetime(joined) * 1e-6 - if joined is not None: - result["joined"] = joined - return result - - SENT = { - "rows": [ - {"json": _row_data(row), "insertId": str(i)} - for i, row in enumerate(ROWS) - ] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - # Test with using string IDs for the table. 
- errors = client.insert_rows( - "{}.{}".format(self.DS_ID, self.TABLE_ID), ROWS, selected_fields=schema - ) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once() - _, req = conn.api_request.call_args - self.assertEqual(req["method"], "POST") - self.assertEqual(req["path"], "/%s" % PATH) - self.assertEqual(req["data"], SENT) - self.assertIsNone(req["timeout"]) - - def test_insert_rows_w_list_of_dictionaries(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import _microseconds_from_datetime - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), - ] - table = Table(self.TABLE_REF, schema=schema) - ROWS = [ - { - "full_name": "Phred Phlyntstone", - "age": 32, - "joined": _datetime_to_rfc3339(WHEN), - }, - { - "full_name": "Bharney Rhubble", - "age": 33, - "joined": WHEN + datetime.timedelta(seconds=1), - }, - { - "full_name": "Wylma Phlyntstone", - "age": 29, - "joined": WHEN + datetime.timedelta(seconds=2), - }, - {"full_name": "Bhettye Rhubble", "age": 27, "joined": None}, - ] - - def _row_data(row): - joined = row["joined"] - if joined is None: - row = copy.deepcopy(row) - del row["joined"] - elif isinstance(joined, datetime.datetime): - row["joined"] = _microseconds_from_datetime(joined) * 1e-6 - row["age"] = str(row["age"]) - return row - - SENT = { - "rows": [ - {"json": _row_data(row), "insertId": str(i)} - for i, row in enumerate(ROWS) - ] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows(table, ROWS) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=None - ) - - def test_insert_rows_w_list_of_Rows(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import Row - - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - table = Table(self.TABLE_REF, schema=schema) - f2i = {"full_name": 0, "age": 1} - ROWS = [ - Row(("Phred Phlyntstone", 32), f2i), - Row(("Bharney Rhubble", 33), f2i), - Row(("Wylma Phlyntstone", 29), f2i), - Row(("Bhettye Rhubble", 27), f2i), - ] - - def _row_data(row): - return {"full_name": row[0], "age": str(row[1])} - - SENT = { - "rows": [ - {"json": _row_data(row), "insertId": str(i)} - for i, row in enumerate(ROWS) - ] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows(table, ROWS) - - 
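The insert_rows variants above accept tuples, dicts, or Row instances, convert values according to the table schema, and attach a uuid4-based insertId to each row unless explicit row_ids are given. From the caller's perspective, a minimal sketch with a hypothetical table:

    from google.cloud import bigquery

    client = bigquery.Client()

    # get_table fetches the schema that insert_rows uses for conversion.
    table = client.get_table("my_dataset.my_table")

    rows_to_insert = [
        {"full_name": "Phred Phlyntstone", "age": 32},
        {"full_name": "Bharney Rhubble", "age": 33},
    ]
    errors = client.insert_rows(table, rows_to_insert)
    assert errors == []  # an empty list means every row was accepted

insert_rows_json takes the same route but sends the given JSON-compatible rows verbatim, with no schema conversion.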
self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=None - ) - - def test_insert_rows_w_skip_invalid_and_ignore_unknown(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - RESPONSE = { - "insertErrors": [ - { - "index": 1, - "errors": [ - { - "reason": "REASON", - "location": "LOCATION", - "debugInfo": "INFO", - "message": "MESSAGE", - } - ], - } - ] - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(RESPONSE) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("voter", "BOOLEAN", mode="NULLABLE"), - ] - table = Table(self.TABLE_REF, schema=schema) - ROWS = [ - ("Phred Phlyntstone", 32, True), - ("Bharney Rhubble", 33, False), - ("Wylma Phlyntstone", 29, True), - ("Bhettye Rhubble", 27, True), - ] - - def _row_data(row): - return { - "full_name": row[0], - "age": str(row[1]), - "voter": row[2] and "true" or "false", - } - - SENT = { - "skipInvalidRows": True, - "ignoreUnknownValues": True, - "templateSuffix": "20160303", - "rows": [ - {"insertId": index, "json": _row_data(row)} - for index, row in enumerate(ROWS) - ], - } - - errors = client.insert_rows( - table, - ROWS, - row_ids=[index for index, _ in enumerate(ROWS)], - skip_invalid_rows=True, - ignore_unknown_values=True, - template_suffix="20160303", - ) - - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["index"], 1) - self.assertEqual(len(errors[0]["errors"]), 1) - self.assertEqual( - errors[0]["errors"][0], RESPONSE["insertErrors"][0]["errors"][0] - ) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=None - ) - - def test_insert_rows_w_repeated_fields(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - color = SchemaField("color", "STRING", mode="REPEATED") - items = SchemaField("items", "INTEGER", mode="REPEATED") - score = SchemaField("score", "INTEGER") - times = SchemaField("times", "TIMESTAMP", mode="REPEATED") - distances = SchemaField("distances", "FLOAT", mode="REPEATED") - structs = SchemaField( - "structs", "RECORD", mode="REPEATED", fields=[score, times, distances] - ) - table = Table(self.TABLE_REF, schema=[color, items, structs]) - ROWS = [ - ( - ["red", "green"], - [1, 2], - [ - ( - 12, - [ - datetime.datetime(2018, 12, 1, 12, 0, 0, tzinfo=pytz.utc), - datetime.datetime(2018, 12, 1, 13, 0, 0, tzinfo=pytz.utc), - ], - [1.25, 2.5], - ), - { - "score": 13, - "times": [ - datetime.datetime(2018, 12, 2, 12, 0, 0, tzinfo=pytz.utc), - datetime.datetime(2018, 12, 2, 13, 0, 0, tzinfo=pytz.utc), - ], - "distances": [-1.25, -2.5], - }, - ], - ), - {"color": None, "items": [], "structs": [(None, [], [3.5])]}, - ] - - SENT = { - "rows": [ - { - "json": { - "color": ["red", "green"], - "items": ["1", "2"], - "structs": [ - { - "score": "12", - "times": [ - 1543665600.0, # 
2018-12-01 12:00 UTC - 1543669200.0, # 2018-12-01 13:00 UTC - ], - "distances": [1.25, 2.5], - }, - { - "score": "13", - "times": [ - 1543752000.0, # 2018-12-02 12:00 UTC - 1543755600.0, # 2018-12-02 13:00 UTC - ], - "distances": [-1.25, -2.5], - }, - ], - }, - "insertId": "0", - }, - { - "json": { - "items": [], - "structs": [{"times": [], "distances": [3.5]}], - }, - "insertId": "1", - }, - ] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows(table, ROWS) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=None, - ) - - def test_insert_rows_w_record_schema(self): - from google.cloud.bigquery.schema import SchemaField - - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - area_code = SchemaField("area_code", "STRING", "REQUIRED") - local_number = SchemaField("local_number", "STRING", "REQUIRED") - rank = SchemaField("rank", "INTEGER", "REQUIRED") - phone = SchemaField( - "phone", "RECORD", mode="NULLABLE", fields=[area_code, local_number, rank] - ) - ROWS = [ - ( - "Phred Phlyntstone", - {"area_code": "800", "local_number": "555-1212", "rank": 1}, - ), - ("Bharney Rhubble", ("877", "768-5309", 2)), - ("Wylma Phlyntstone", None), - ] - - SENT = { - "rows": [ - { - "json": { - "full_name": "Phred Phlyntstone", - "phone": { - "area_code": "800", - "local_number": "555-1212", - "rank": "1", - }, - }, - "insertId": "0", - }, - { - "json": { - "full_name": "Bharney Rhubble", - "phone": { - "area_code": "877", - "local_number": "768-5309", - "rank": "2", - }, - }, - "insertId": "1", - }, - {"json": {"full_name": "Wylma Phlyntstone"}, "insertId": "2"}, - ] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows( - self.TABLE_REF, ROWS, selected_fields=[full_name, phone] - ) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=None - ) - - def test_insert_rows_w_explicit_none_insert_ids(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PATH = "projects/{}/datasets/{}/tables/{}/insertAll".format( - self.PROJECT, self.DS_ID, self.TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - table = Table(self.TABLE_REF, schema=schema) - ROWS = [ - {"full_name": "Phred Phlyntstone", "age": 32}, - {"full_name": "Bharney Rhubble", "age": 33}, - ] - - def _row_data(row): - row["age"] = str(row["age"]) - return row - - SENT = {"rows": [{"json": _row_data(row), "insertId": None} for row in ROWS]} - - errors = client.insert_rows(table, ROWS, row_ids=[None] * len(ROWS)) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/{}".format(PATH), data=SENT, timeout=None, - ) - - def test_insert_rows_errors(self): - from google.cloud.bigquery.schema import SchemaField - from 
google.cloud.bigquery.table import Table - - ROWS = [ - ("Phred Phlyntstone", 32, True), - ("Bharney Rhubble", 33, False), - ("Wylma Phlyntstone", 29, True), - ("Bhettye Rhubble", 27, True), - ] - creds = _make_credentials() - http = object() - - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - - # table ref with no selected fields - with self.assertRaises(ValueError): - client.insert_rows(self.TABLE_REF, ROWS) - - # table with no schema - with self.assertRaises(ValueError): - client.insert_rows(Table(self.TABLE_REF), ROWS) - - # neither Table nor TableReference - with self.assertRaises(TypeError): - client.insert_rows(1, ROWS) - - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - ] - table = Table(self.TABLE_REF, schema=schema) - - # rows is just a dict - with self.assertRaises(TypeError): - client.insert_rows(table, {"full_name": "value"}) - - def test_insert_rows_w_numeric(self): - from google.cloud.bigquery import table - from google.cloud.bigquery.schema import SchemaField - - project = "PROJECT" - ds_id = "DS_ID" - table_id = "TABLE_ID" - creds = _make_credentials() - http = object() - client = self._make_one(project=project, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - table_ref = DatasetReference(project, ds_id).table(table_id) - schema = [SchemaField("account", "STRING"), SchemaField("balance", "NUMERIC")] - insert_table = table.Table(table_ref, schema=schema) - rows = [ - ("Savings", decimal.Decimal("23.47")), - ("Checking", decimal.Decimal("1.98")), - ("Mortgage", decimal.Decimal("-12345678909.87654321")), - ] - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))): - errors = client.insert_rows(insert_table, rows) - - self.assertEqual(len(errors), 0) - rows_json = [ - {"account": "Savings", "balance": "23.47"}, - {"account": "Checking", "balance": "1.98"}, - {"account": "Mortgage", "balance": "-12345678909.87654321"}, - ] - sent = { - "rows": [ - {"json": row, "insertId": str(i)} for i, row in enumerate(rows_json) - ] - } - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/datasets/{}/tables/{}/insertAll".format( - project, ds_id, table_id - ), - data=sent, - timeout=None, - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_insert_rows_from_dataframe(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( - self.PROJECT, self.DS_ID, self.TABLE_REF.table_id - ) - - dataframe = pandas.DataFrame( - [ - {"name": u"Little One", "age": 10, "adult": False}, - {"name": u"Young Gun", "age": 20, "adult": True}, - {"name": u"Dad", "age": 30, "adult": True}, - {"name": u"Stranger", "age": 40, "adult": True}, - ] - ) - - # create client - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}, {}) - - # create table - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("adult", "BOOLEAN", mode="REQUIRED"), - ] - table = Table(self.TABLE_REF, schema=schema) - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(dataframe)))): - error_info = client.insert_rows_from_dataframe( - table, dataframe, chunk_size=3, timeout=7.5 - ) - - self.assertEqual(len(error_info), 2) - for chunk_errors in error_info: - assert chunk_errors 
== [] - - EXPECTED_SENT_DATA = [ - { - "rows": [ - { - "insertId": "0", - "json": {"name": "Little One", "age": "10", "adult": "false"}, - }, - { - "insertId": "1", - "json": {"name": "Young Gun", "age": "20", "adult": "true"}, - }, - { - "insertId": "2", - "json": {"name": "Dad", "age": "30", "adult": "true"}, - }, - ] - }, - { - "rows": [ - { - "insertId": "3", - "json": {"name": "Stranger", "age": "40", "adult": "true"}, - } - ] - }, - ] - - actual_calls = conn.api_request.call_args_list - - for call, expected_data in six.moves.zip_longest( - actual_calls, EXPECTED_SENT_DATA - ): - expected_call = mock.call( - method="POST", path=API_PATH, data=expected_data, timeout=7.5 - ) - assert call == expected_call - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_insert_rows_from_dataframe_many_columns(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( - self.PROJECT, self.DS_ID, self.TABLE_REF.table_id - ) - N_COLUMNS = 256 # should be >= 256 - - dataframe = pandas.DataFrame( - [{"foo_{}".format(i): "bar_{}".format(i) for i in range(N_COLUMNS)}] - ) - - # create client - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}, {}) - - # create table - schema = [SchemaField("foo_{}".format(i), "STRING") for i in range(N_COLUMNS)] - table = Table(self.TABLE_REF, schema=schema) - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(dataframe)))): - error_info = client.insert_rows_from_dataframe( - table, dataframe, chunk_size=3 - ) - - assert len(error_info) == 1 - assert error_info[0] == [] - - EXPECTED_SENT_DATA = { - "rows": [ - { - "insertId": "0", - "json": { - "foo_{}".format(i): "bar_{}".format(i) for i in range(N_COLUMNS) - }, - } - ] - } - expected_call = mock.call( - method="POST", path=API_PATH, data=EXPECTED_SENT_DATA, timeout=None - ) - - actual_calls = conn.api_request.call_args_list - assert len(actual_calls) == 1 - assert actual_calls[0] == expected_call - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( - self.PROJECT, self.DS_ID, self.TABLE_REF.table_id - ) - - dataframe = pandas.DataFrame( - [ - {"name": u"Little One", "adult": False}, - {"name": u"Young Gun", "adult": True}, - ] - ) - - # create client - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}, {}) - - # create table - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("adult", "BOOLEAN", mode="REQUIRED"), - ] - table = Table(self.TABLE_REF, schema=schema) - - error_info = client.insert_rows_from_dataframe( - table, dataframe, row_ids=[None] * len(dataframe) - ) - - self.assertEqual(len(error_info), 1) - assert error_info[0] == [] # no chunk errors - - EXPECTED_SENT_DATA = { - "rows": [ - {"insertId": None, "json": {"name": "Little One", "adult": "false"}}, - {"insertId": None, "json": {"name": "Young Gun", "adult": "true"}}, - ] - } - - actual_calls = conn.api_request.call_args_list - assert len(actual_calls) == 1 - assert actual_calls[0] == mock.call( - 
method="POST", path=API_PATH, data=EXPECTED_SENT_DATA, timeout=None - ) - - def test_insert_rows_json(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PROJECT = "PROJECT" - DS_ID = "DS_ID" - TABLE_ID = "TABLE_ID" - PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( - PROJECT, - DS_ID, - TABLE_ID, - ) - creds = _make_credentials() - http = object() - client = self._make_one(project=PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection({}) - table_ref = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), - ] - table = Table(table_ref, schema=schema) - ROWS = [ - { - "full_name": "Phred Phlyntstone", - "age": "32", - "joined": "2015-07-24T19:53:19.006000Z", - }, - {"full_name": "Bharney Rhubble", "age": "33", "joined": 1437767600.006}, - {"full_name": "Wylma Phlyntstone", "age": "29", "joined": 1437767601.006}, - {"full_name": "Bhettye Rhubble", "age": "27", "joined": None}, - ] - - SENT = { - "rows": [{"json": row, "insertId": str(i)} for i, row in enumerate(ROWS)] - } - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows_json(table, ROWS, timeout=7.5) - - self.assertEqual(len(errors), 0) - conn.api_request.assert_called_once_with( - method="POST", path="/%s" % PATH, data=SENT, timeout=7.5, - ) - - def test_insert_rows_json_with_string_id(self): - rows = [{"col1": "val1"}] - creds = _make_credentials() - http = object() - client = self._make_one( - project="default-project", credentials=creds, _http=http - ) - conn = client._connection = make_connection({}) - - with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))): - errors = client.insert_rows_json("proj.dset.tbl", rows) - - self.assertEqual(len(errors), 0) - expected = { - "rows": [{"json": row, "insertId": str(i)} for i, row in enumerate(rows)] - } - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/proj/datasets/dset/tables/tbl/insertAll", - data=expected, - timeout=None, - ) - - def test_insert_rows_json_w_explicit_none_insert_ids(self): - rows = [{"col1": "val1"}, {"col2": "val2"}] - creds = _make_credentials() - http = object() - client = self._make_one( - project="default-project", credentials=creds, _http=http - ) - conn = client._connection = make_connection({}) - - errors = client.insert_rows_json( - "proj.dset.tbl", rows, row_ids=[None] * len(rows), - ) - - self.assertEqual(len(errors), 0) - expected = {"rows": [{"json": row, "insertId": None} for row in rows]} - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/proj/datasets/dset/tables/tbl/insertAll", - data=expected, - timeout=None, - ) - - def test_insert_rows_w_wrong_arg(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PROJECT = "PROJECT" - DS_ID = "DS_ID" - TABLE_ID = "TABLE_ID" - ROW = {"full_name": "Bhettye Rhubble", "age": "27", "joined": None} - - creds = _make_credentials() - client = self._make_one(project=PROJECT, credentials=creds, _http=object()) - client._connection = make_connection({}) - - table_ref = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - schema = [ - SchemaField("full_name", 
"STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), - ] - table = Table(table_ref, schema=schema) - - with self.assertRaises(TypeError): - client.insert_rows_json(table, ROW) - - def test_list_partitions(self): - from google.cloud.bigquery.table import Table - - rows = 3 - meta_info = _make_list_partitons_meta_info( - self.PROJECT, self.DS_ID, self.TABLE_ID, rows - ) - - data = { - "totalRows": str(rows), - "rows": [ - {"f": [{"v": "20180101"}]}, - {"f": [{"v": "20180102"}]}, - {"f": [{"v": "20180103"}]}, - ], - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection(meta_info, data) - table = Table(self.TABLE_REF) - - partition_list = client.list_partitions(table) - self.assertEqual(len(partition_list), rows) - self.assertIn("20180102", partition_list) - - def test_list_partitions_with_string_id(self): - meta_info = _make_list_partitons_meta_info( - self.PROJECT, self.DS_ID, self.TABLE_ID, 0 - ) - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection(meta_info, {}) - - partition_list = client.list_partitions( - "{}.{}".format(self.DS_ID, self.TABLE_ID) - ) - - self.assertEqual(len(partition_list), 0) - - def test_list_partitions_splitting_timout_between_requests(self): - from google.cloud.bigquery.table import Table - - row_count = 2 - meta_info = _make_list_partitons_meta_info( - self.PROJECT, self.DS_ID, self.TABLE_ID, row_count - ) - - data = { - "totalRows": str(row_count), - "rows": [{"f": [{"v": "20180101"}]}, {"f": [{"v": "20180102"}]}], - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection(meta_info, data) - table = Table(self.TABLE_REF) - - with freezegun.freeze_time("2019-01-01 00:00:00", tick=False) as frozen_time: - - def delayed_get_table(*args, **kwargs): - frozen_time.tick(delta=1.4) - return orig_get_table(*args, **kwargs) - - orig_get_table = client.get_table - client.get_table = mock.Mock(side_effect=delayed_get_table) - - client.list_partitions(table, timeout=5.0) - - client.get_table.assert_called_once() - _, kwargs = client.get_table.call_args - self.assertEqual(kwargs.get("timeout"), 5.0) - - client._connection.api_request.assert_called() - _, kwargs = client._connection.api_request.call_args - self.assertAlmostEqual(kwargs.get("timeout"), 3.6, places=5) - - def test_list_rows(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import Row - - PATH = "projects/%s/datasets/%s/tables/%s/data" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) - WHEN_1 = WHEN + datetime.timedelta(seconds=1) - WHEN_2 = WHEN + datetime.timedelta(seconds=2) - ROWS = 1234 - TOKEN = "TOKEN" - - def _bigquery_timestamp_float_repr(ts_float): - # Preserve microsecond precision for E+09 timestamps - return "%0.15E" % (ts_float,) - - DATA = { - "totalRows": str(ROWS), - "pageToken": TOKEN, - "rows": [ - { - "f": [ - {"v": "Phred Phlyntstone"}, - {"v": "32"}, - {"v": _bigquery_timestamp_float_repr(WHEN_TS)}, - ] - }, - { - "f": [ - 
{"v": "Bharney Rhubble"}, - {"v": "33"}, - {"v": _bigquery_timestamp_float_repr(WHEN_TS + 1)}, - ] - }, - { - "f": [ - {"v": "Wylma Phlyntstone"}, - {"v": "29"}, - {"v": _bigquery_timestamp_float_repr(WHEN_TS + 2)}, - ] - }, - {"f": [{"v": "Bhettye Rhubble"}, {"v": None}, {"v": None}]}, - ], - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(DATA, DATA) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="NULLABLE") - joined = SchemaField("joined", "TIMESTAMP", mode="NULLABLE") - table = Table(self.TABLE_REF, schema=[full_name, age, joined]) - - iterator = client.list_rows(table, timeout=7.5) - page = six.next(iterator.pages) - rows = list(page) - total_rows = iterator.total_rows - page_token = iterator.next_page_token - - f2i = {"full_name": 0, "age": 1, "joined": 2} - self.assertEqual(len(rows), 4) - self.assertEqual(rows[0], Row(("Phred Phlyntstone", 32, WHEN), f2i)) - self.assertEqual(rows[1], Row(("Bharney Rhubble", 33, WHEN_1), f2i)) - self.assertEqual(rows[2], Row(("Wylma Phlyntstone", 29, WHEN_2), f2i)) - self.assertEqual(rows[3], Row(("Bhettye Rhubble", None, None), f2i)) - self.assertEqual(total_rows, ROWS) - self.assertEqual(page_token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % PATH, query_params={}, timeout=7.5 - ) - - def test_list_rows_empty_table(self): - response = {"totalRows": "0", "rows": []} - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection(response, response) - - # Table that has no schema because it's an empty table. - rows = client.list_rows( - # Test with using a string for the table ID. - "{}.{}.{}".format( - self.TABLE_REF.project, - self.TABLE_REF.dataset_id, - self.TABLE_REF.table_id, - ), - selected_fields=[], - ) - - # When a table reference / string and selected_fields is provided, - # total_rows can't be populated until iteration starts. 
- self.assertIsNone(rows.total_rows) - self.assertEqual(tuple(rows), ()) - self.assertEqual(rows.total_rows, 0) - - def test_list_rows_query_params(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - table = Table( - self.TABLE_REF, schema=[SchemaField("age", "INTEGER", mode="NULLABLE")] - ) - tests = [ - ({}, {}), - ({"start_index": 1}, {"startIndex": 1}), - ({"max_results": 2}, {"maxResults": 2}), - ({"start_index": 1, "max_results": 2}, {"startIndex": 1, "maxResults": 2}), - ] - conn = client._connection = make_connection(*len(tests) * [{}]) - for i, test in enumerate(tests): - iterator = client.list_rows(table, **test[0]) - six.next(iterator.pages) - req = conn.api_request.call_args_list[i] - self.assertEqual(req[1]["query_params"], test[1], "for kwargs %s" % test[0]) - - def test_list_rows_repeated_fields(self): - from google.cloud.bigquery.schema import SchemaField - - PATH = "projects/%s/datasets/%s/tables/%s/data" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - ROWS = 1234 - TOKEN = "TOKEN" - DATA = { - "totalRows": ROWS, - "pageToken": TOKEN, - "rows": [ - { - "f": [ - {"v": [{"v": "red"}, {"v": "green"}]}, - { - "v": [ - { - "v": { - "f": [ - {"v": [{"v": "1"}, {"v": "2"}]}, - {"v": [{"v": "3.1415"}, {"v": "1.414"}]}, - ] - } - } - ] - }, - ] - } - ], - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(DATA) - color = SchemaField("color", "STRING", mode="REPEATED") - index = SchemaField("index", "INTEGER", "REPEATED") - score = SchemaField("score", "FLOAT", "REPEATED") - struct = SchemaField("struct", "RECORD", mode="REPEATED", fields=[index, score]) - - iterator = client.list_rows(self.TABLE_REF, selected_fields=[color, struct]) - page = six.next(iterator.pages) - rows = list(page) - total_rows = iterator.total_rows - page_token = iterator.next_page_token - - self.assertEqual(len(rows), 1) - self.assertEqual(rows[0][0], ["red", "green"]) - self.assertEqual(rows[0][1], [{"index": [1, 2], "score": [3.1415, 1.414]}]) - self.assertEqual(total_rows, ROWS) - self.assertEqual(page_token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", - path="/%s" % PATH, - query_params={"selectedFields": "color,struct"}, - timeout=None, - ) - - def test_list_rows_w_record_schema(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - PATH = "projects/%s/datasets/%s/tables/%s/data" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - ROWS = 1234 - TOKEN = "TOKEN" - DATA = { - "totalRows": ROWS, - "pageToken": TOKEN, - "rows": [ - { - "f": [ - {"v": "Phred Phlyntstone"}, - {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, - ] - }, - { - "f": [ - {"v": "Bharney Rhubble"}, - {"v": {"f": [{"v": "877"}, {"v": "768-5309"}, {"v": 2}]}}, - ] - }, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}]}, - ], - } - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(DATA) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - area_code = SchemaField("area_code", "STRING", "REQUIRED") - local_number = SchemaField("local_number", "STRING", "REQUIRED") - rank = SchemaField("rank", 
"INTEGER", "REQUIRED") - phone = SchemaField( - "phone", "RECORD", mode="NULLABLE", fields=[area_code, local_number, rank] - ) - table = Table(self.TABLE_REF, schema=[full_name, phone]) - - iterator = client.list_rows(table) - page = six.next(iterator.pages) - rows = list(page) - total_rows = iterator.total_rows - page_token = iterator.next_page_token - - self.assertEqual(len(rows), 3) - self.assertEqual(rows[0][0], "Phred Phlyntstone") - self.assertEqual( - rows[0][1], {"area_code": "800", "local_number": "555-1212", "rank": 1} - ) - self.assertEqual(rows[1][0], "Bharney Rhubble") - self.assertEqual( - rows[1][1], {"area_code": "877", "local_number": "768-5309", "rank": 2} - ) - self.assertEqual(rows[2][0], "Wylma Phlyntstone") - self.assertIsNone(rows[2][1]) - self.assertEqual(total_rows, ROWS) - self.assertEqual(page_token, TOKEN) - - conn.api_request.assert_called_once_with( - method="GET", path="/%s" % PATH, query_params={}, timeout=None - ) - - def test_list_rows_with_missing_schema(self): - from google.cloud.bigquery.table import Table, TableListItem - - table_path = "/projects/{}/datasets/{}/tables/{}".format( - self.PROJECT, self.DS_ID, self.TABLE_ID - ) - tabledata_path = "{}/data".format(table_path) - - table_list_item_data = { - "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - } - table_data = copy.deepcopy(table_list_item_data) - # Intentionally make wrong, since total_rows can update during iteration. - table_data["numRows"] = 2 - table_data["schema"] = { - "fields": [ - {"name": "name", "type": "STRING"}, - {"name": "age", "type": "INTEGER"}, - ] - } - rows_data = { - "totalRows": 3, - "pageToken": None, - "rows": [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "31"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}]}, - ], - } - - creds = _make_credentials() - http = object() - - schemaless_tables = ( - "{}.{}".format(self.DS_ID, self.TABLE_ID), - self.TABLE_REF, - Table(self.TABLE_REF), - TableListItem(table_list_item_data), - ) - - for table in schemaless_tables: - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - conn = client._connection = make_connection(table_data, rows_data) - - row_iter = client.list_rows(table) - - conn.api_request.assert_called_once_with( - method="GET", path=table_path, timeout=None - ) - conn.api_request.reset_mock() - self.assertEqual(row_iter.total_rows, 2, msg=repr(table)) - - rows = list(row_iter) - conn.api_request.assert_called_once_with( - method="GET", path=tabledata_path, query_params={}, timeout=None - ) - self.assertEqual(row_iter.total_rows, 3, msg=repr(table)) - self.assertEqual(rows[0].name, "Phred Phlyntstone", msg=repr(table)) - self.assertEqual(rows[1].age, 31, msg=repr(table)) - self.assertIsNone(rows[2].age, msg=repr(table)) - - def test_list_rows_splitting_timout_between_requests(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - response = {"totalRows": "0", "rows": []} - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - client._connection = make_connection(response, response) - - table = Table( - self.TABLE_REF, schema=[SchemaField("field_x", "INTEGER", mode="NULLABLE")] - ) - - with freezegun.freeze_time("1970-01-01 00:00:00", tick=False) as frozen_time: - - def 
delayed_get_table(*args, **kwargs): - frozen_time.tick(delta=1.4) - return table - - client.get_table = mock.Mock(side_effect=delayed_get_table) - - rows_iter = client.list_rows( - "{}.{}.{}".format( - self.TABLE_REF.project, - self.TABLE_REF.dataset_id, - self.TABLE_REF.table_id, - ), - timeout=5.0, - ) - six.next(rows_iter.pages) - - client.get_table.assert_called_once() - _, kwargs = client.get_table.call_args - self.assertEqual(kwargs.get("timeout"), 5.0) - - client._connection.api_request.assert_called_once() - _, kwargs = client._connection.api_request.call_args - self.assertAlmostEqual(kwargs.get("timeout"), 3.6) - - def test_list_rows_error(self): - creds = _make_credentials() - http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) - - # neither Table nor TableReference - with self.assertRaises(TypeError): - client.list_rows(1) - - -class Test_make_job_id(unittest.TestCase): - def _call_fut(self, job_id, prefix=None): - from google.cloud.bigquery.client import _make_job_id - - return _make_job_id(job_id, prefix=prefix) - - def test__make_job_id_wo_suffix(self): - job_id = self._call_fut("job_id") - - self.assertEqual(job_id, "job_id") - - def test__make_job_id_w_suffix(self): - with mock.patch("uuid.uuid4", side_effect=["212345"]): - job_id = self._call_fut(None, prefix="job_id") - - self.assertEqual(job_id, "job_id212345") - - def test__make_random_job_id(self): - with mock.patch("uuid.uuid4", side_effect=["212345"]): - job_id = self._call_fut(None) - - self.assertEqual(job_id, "212345") - - def test__make_job_id_w_job_id_overrides_prefix(self): - job_id = self._call_fut("job_id", prefix="unused_prefix") - - self.assertEqual(job_id, "job_id") - - -class TestClientUpload(object): - # NOTE: This is a "partner" to `TestClient` meant to test some of the - # "load_table_from_file" portions of `Client`. It also uses - # `pytest`-style tests rather than `unittest`-style. 
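For orientation while reading the `Test_make_job_id` cases above: the behavior they pin down can be summarized in a minimal sketch (a hypothetical re-implementation for illustration only, not the library's actual `_make_job_id` source):

import uuid

def make_job_id_sketch(job_id=None, prefix=None):
    # An explicitly supplied job_id always wins, even over a prefix.
    if job_id is not None:
        return job_id
    # A prefix gets a random uuid4 suffix appended.
    if prefix is not None:
        return prefix + str(uuid.uuid4())
    # With neither argument, the job ID is a bare uuid4.
    return str(uuid.uuid4())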
- from google.cloud.bigquery.job import SourceFormat - - TABLE_REF = DatasetReference("project_id", "test_dataset").table("test_table") - - LOCATION = "us-central" - - @staticmethod - def _make_client(transport=None, location=None): - from google.cloud.bigquery import _http - from google.cloud.bigquery import client - - cl = client.Client( - project="project_id", - credentials=_make_credentials(), - _http=transport, - location=location, - ) - cl._connection = mock.create_autospec(_http.Connection, instance=True) - return cl - - @staticmethod - def _make_response(status_code, content="", headers={}): - """Make a mock HTTP response.""" - import requests - - response = requests.Response() - response.request = requests.Request("POST", "http://example.com").prepare() - response._content = content.encode("utf-8") - response.headers.update(headers) - response.status_code = status_code - return response - - @classmethod - def _make_do_upload_patch(cls, client, method, resource={}, side_effect=None): - """Patches the low-level upload helpers.""" - if side_effect is None: - side_effect = [ - cls._make_response( - http_client.OK, - json.dumps(resource), - {"Content-Type": "application/json"}, - ) - ] - return mock.patch.object(client, method, side_effect=side_effect, autospec=True) - - EXPECTED_CONFIGURATION = { - "jobReference": {"projectId": "project_id", "jobId": "job_id"}, - "configuration": { - "load": { - "sourceFormat": SourceFormat.CSV, - "destinationTable": { - "projectId": "project_id", - "datasetId": "test_dataset", - "tableId": "test_table", - }, - } - }, - } - - @staticmethod - def _make_file_obj(): - return io.BytesIO(b"hello, is it me you're looking for?") - - def _make_gzip_file_obj(self, writable): - if writable: - return gzip.GzipFile(mode="w", fileobj=io.BytesIO()) - else: - return gzip.GzipFile(mode="r", fileobj=self._make_file_obj()) - - @staticmethod - def _make_config(): - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.job import SourceFormat - - config = LoadJobConfig() - config.source_format = SourceFormat.CSV - return config - - # High-level tests - - def test_load_table_from_file_resumable(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client() - file_obj = self._make_file_obj() - job_config = self._make_config() - original_config_copy = copy.deepcopy(job_config) - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, self.TABLE_REF, job_id="job_id", job_config=job_config, - ) - - do_upload.assert_called_once_with( - file_obj, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES - ) - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_load_table_from_file_w_explicit_project(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client() - file_obj = self._make_file_obj() - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, - self.TABLE_REF, - job_id="job_id", - project="other-project", - location=self.LOCATION, - job_config=self._make_config(), - ) - - expected_resource = copy.deepcopy(self.EXPECTED_CONFIGURATION) - expected_resource["jobReference"]["location"] = self.LOCATION - 
expected_resource["jobReference"]["projectId"] = "other-project" - do_upload.assert_called_once_with( - file_obj, expected_resource, _DEFAULT_NUM_RETRIES - ) - - def test_load_table_from_file_w_client_location(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client(location=self.LOCATION) - file_obj = self._make_file_obj() - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, - # Test with string for table ID. - "{}.{}.{}".format( - self.TABLE_REF.project, - self.TABLE_REF.dataset_id, - self.TABLE_REF.table_id, - ), - job_id="job_id", - project="other-project", - job_config=self._make_config(), - ) - - expected_resource = copy.deepcopy(self.EXPECTED_CONFIGURATION) - expected_resource["jobReference"]["location"] = self.LOCATION - expected_resource["jobReference"]["projectId"] = "other-project" - do_upload.assert_called_once_with( - file_obj, expected_resource, _DEFAULT_NUM_RETRIES - ) - - def test_load_table_from_file_resumable_metadata(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import WriteDisposition - - client = self._make_client() - file_obj = self._make_file_obj() - - config = self._make_config() - config.allow_jagged_rows = False - config.allow_quoted_newlines = False - config.create_disposition = CreateDisposition.CREATE_IF_NEEDED - config.encoding = "utf8" - config.field_delimiter = "," - config.ignore_unknown_values = False - config.max_bad_records = 0 - config.quote_character = '"' - config.skip_leading_rows = 1 - config.write_disposition = WriteDisposition.WRITE_APPEND - config.null_marker = r"\N" - - expected_config = { - "jobReference": {"projectId": "project_id", "jobId": "job_id"}, - "configuration": { - "load": { - "destinationTable": { - "projectId": self.TABLE_REF.project, - "datasetId": self.TABLE_REF.dataset_id, - "tableId": self.TABLE_REF.table_id, - }, - "sourceFormat": config.source_format, - "allowJaggedRows": config.allow_jagged_rows, - "allowQuotedNewlines": config.allow_quoted_newlines, - "createDisposition": config.create_disposition, - "encoding": config.encoding, - "fieldDelimiter": config.field_delimiter, - "ignoreUnknownValues": config.ignore_unknown_values, - "maxBadRecords": config.max_bad_records, - "quote": config.quote_character, - "skipLeadingRows": str(config.skip_leading_rows), - "writeDisposition": config.write_disposition, - "nullMarker": config.null_marker, - } - }, - } - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", expected_config - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, self.TABLE_REF, job_id="job_id", job_config=config - ) - - do_upload.assert_called_once_with( - file_obj, expected_config, _DEFAULT_NUM_RETRIES - ) - - def test_load_table_from_file_multipart(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client() - file_obj = self._make_file_obj() - file_obj_size = 10 - config = self._make_config() - - do_upload_patch = self._make_do_upload_patch( - client, "_do_multipart_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, - self.TABLE_REF, - job_id="job_id", - job_config=config, - size=file_obj_size, - ) - - do_upload.assert_called_once_with( - 
file_obj, self.EXPECTED_CONFIGURATION, file_obj_size, _DEFAULT_NUM_RETRIES - ) - - def test_load_table_from_file_with_retries(self): - client = self._make_client() - file_obj = self._make_file_obj() - num_retries = 20 - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - file_obj, - self.TABLE_REF, - num_retries=num_retries, - job_id="job_id", - job_config=self._make_config(), - ) - - do_upload.assert_called_once_with( - file_obj, self.EXPECTED_CONFIGURATION, num_retries - ) - - def test_load_table_from_file_with_rewind(self): - client = self._make_client() - file_obj = self._make_file_obj() - file_obj.seek(2) - - with self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ): - client.load_table_from_file(file_obj, self.TABLE_REF, rewind=True) - - assert file_obj.tell() == 0 - - def test_load_table_from_file_with_readable_gzip(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client() - gzip_file = self._make_gzip_file_obj(writable=False) - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION - ) - with do_upload_patch as do_upload: - client.load_table_from_file( - gzip_file, - self.TABLE_REF, - job_id="job_id", - job_config=self._make_config(), - ) - - do_upload.assert_called_once_with( - gzip_file, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES - ) - - def test_load_table_from_file_with_writable_gzip(self): - client = self._make_client() - gzip_file = self._make_gzip_file_obj(writable=True) - - with pytest.raises(ValueError): - client.load_table_from_file( - gzip_file, - self.TABLE_REF, - job_id="job_id", - job_config=self._make_config(), - ) - - def test_load_table_from_file_failure(self): - from google.resumable_media import InvalidResponse - from google.cloud import exceptions - - client = self._make_client() - file_obj = self._make_file_obj() - - response = self._make_response( - content="Someone is already in this spot.", status_code=http_client.CONFLICT - ) - - do_upload_patch = self._make_do_upload_patch( - client, "_do_resumable_upload", side_effect=InvalidResponse(response) - ) - - with do_upload_patch, pytest.raises(exceptions.Conflict) as exc_info: - client.load_table_from_file(file_obj, self.TABLE_REF, rewind=True) - - assert response.text in exc_info.value.message - assert exc_info.value.errors == [] - - def test_load_table_from_file_bad_mode(self): - client = self._make_client() - file_obj = mock.Mock(spec=["mode"]) - file_obj.mode = "x" - - with pytest.raises(ValueError): - client.load_table_from_file(file_obj, self.TABLE_REF) - - def test_load_table_from_file_w_invalid_job_config(self): - from google.cloud.bigquery import job - - client = self._make_client() - gzip_file = self._make_gzip_file_obj(writable=True) - config = job.QueryJobConfig() - with pytest.raises(TypeError) as exc: - client.load_table_from_file( - gzip_file, self.TABLE_REF, job_id="job_id", job_config=config - ) - err_msg = str(exc.value) - assert "Expected an instance of LoadJobConfig" in err_msg - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = 
self._make_client() - records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] - dataframe = pandas.DataFrame(records) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - return_value=mock.Mock( - schema=[SchemaField("id", "INTEGER"), SchemaField("age", "INTEGER")] - ), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - with load_patch as load_table_from_file, get_table_patch: - client.load_table_from_dataframe(dataframe, self.TABLE_REF) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=None, - project=None, - job_config=mock.ANY, - ) - - sent_file = load_table_from_file.mock_calls[0][1][1] - assert sent_file.closed - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_client_location(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client(location=self.LOCATION) - records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] - dataframe = pandas.DataFrame(records) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - return_value=mock.Mock( - schema=[SchemaField("id", "INTEGER"), SchemaField("age", "INTEGER")] - ), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - with load_patch as load_table_from_file, get_table_patch: - client.load_table_from_dataframe(dataframe, self.TABLE_REF) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_file = load_table_from_file.mock_calls[0][1][1] - assert sent_file.closed - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_custom_job_config(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] - dataframe = pandas.DataFrame(records) - job_config = job.LoadJobConfig( - write_disposition=job.WriteDisposition.WRITE_TRUNCATE - ) - original_config_copy = copy.deepcopy(job_config) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - return_value=mock.Mock( - schema=[SchemaField("id", "INTEGER"), SchemaField("age", "INTEGER")] - ), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - with load_patch as load_table_from_file, get_table_patch as get_table: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - # no need to fetch and 
inspect table schema for WRITE_TRUNCATE jobs - assert not get_table.called - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - assert sent_config.write_disposition == job.WriteDisposition.WRITE_TRUNCATE - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_automatic_schema(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - df_data = collections.OrderedDict( - [ - ("int_col", [1, 2, 3]), - ("float_col", [1.0, 2.0, 3.0]), - ("bool_col", [True, False, True]), - ( - "dt_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ), - ), - ( - "ts_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ).dt.tz_localize(pytz.utc), - ), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - side_effect=google.api_core.exceptions.NotFound("Table not found"), - ) - with load_patch as load_table_from_file, get_table_patch: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, location=self.LOCATION - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - assert tuple(sent_config.schema) == ( - SchemaField("int_col", "INTEGER"), - SchemaField("float_col", "FLOAT"), - SchemaField("bool_col", "BOOLEAN"), - SchemaField("dt_col", "TIMESTAMP"), - SchemaField("ts_col", "TIMESTAMP"), - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_index_and_auto_schema(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - df_data = collections.OrderedDict( - [("int_col", [10, 20, 30]), ("float_col", [1.0, 2.0, 3.0])] - ) - dataframe = pandas.DataFrame( - df_data, - index=pandas.Index(name="unique_name", data=["one", "two", "three"]), - ) - - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - 
autospec=True, - return_value=mock.Mock( - schema=[ - SchemaField("int_col", "INTEGER"), - SchemaField("float_col", "FLOAT"), - SchemaField("unique_name", "STRING"), - ] - ), - ) - with load_patch as load_table_from_file, get_table_patch: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, location=self.LOCATION - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - - sent_schema = sorted(sent_config.schema, key=operator.attrgetter("name")) - expected_sent_schema = [ - SchemaField("float_col", "FLOAT"), - SchemaField("int_col", "INTEGER"), - SchemaField("unique_name", "STRING"), - ] - assert sent_schema == expected_sent_schema - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_unknown_table(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - - client = self._make_client() - records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] - dataframe = pandas.DataFrame(records) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - side_effect=google.api_core.exceptions.NotFound("Table not found"), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - with load_patch as load_table_from_file, get_table_patch: - # there should be no error - client.load_table_from_dataframe(dataframe, self.TABLE_REF) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=None, - project=None, - job_config=mock.ANY, - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(fastparquet is None, "Requires `fastparquet`") - def test_load_table_from_dataframe_no_schema_warning_wo_pyarrow(self): - client = self._make_client() - - # Pick at least one column type that translates to Pandas dtype - # "object". A string column matches that. - records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] - dataframe = pandas.DataFrame(records) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - side_effect=google.api_core.exceptions.NotFound("Table not found"), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - pyarrow_patch = mock.patch("google.cloud.bigquery.client.pyarrow", None) - pyarrow_patch_helpers = mock.patch( - "google.cloud.bigquery._pandas_helpers.pyarrow", None - ) - catch_warnings = warnings.catch_warnings(record=True) - - with get_table_patch, load_patch, pyarrow_patch, pyarrow_patch_helpers, catch_warnings as warned: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, location=self.LOCATION - ) - - matches = [ - warning - for warning in warned - if warning.category in (DeprecationWarning, PendingDeprecationWarning) - and "could not be detected" in str(warning) - and "please provide a schema" in str(warning) - ] - assert matches, "A missing schema deprecation warning was not raised." 
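The dataframe-load tests above and below all funnel through the same public entry point. As a quick orientation, here is the usage pattern they exercise, sketched with a placeholder destination ("my-project.my_dataset.my_table") and assuming application default credentials; supplying an explicit schema is what avoids the "could not be detected" warning when dtypes are ambiguous:

import pandas
from google.cloud import bigquery

client = bigquery.Client()  # assumes application default credentials
dataframe = pandas.DataFrame([{"name": "Monty", "age": 100}])
# An explicit schema sidesteps autodetection for dtype "object" columns.
job_config = bigquery.LoadJobConfig(
    schema=[
        bigquery.SchemaField("name", "STRING"),
        bigquery.SchemaField("age", "INTEGER"),
    ]
)
load_job = client.load_table_from_dataframe(
    dataframe, "my-project.my_dataset.my_table", job_config=job_config
)
load_job.result()  # block until the load job completes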
- - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_struct_fields_error(self): - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - - records = [{"float_column": 3.14, "struct_column": [{"foo": 1}, {"bar": -1}]}] - dataframe = pandas.DataFrame(data=records) - - schema = [ - SchemaField("float_column", "FLOAT"), - SchemaField( - "agg_col", - "RECORD", - fields=[SchemaField("foo", "INTEGER"), SchemaField("bar", "INTEGER")], - ), - ] - job_config = job.LoadJobConfig(schema=schema) - - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - with pytest.raises(ValueError) as exc_info, load_patch: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - err_msg = str(exc_info.value) - assert "struct" in err_msg - assert "not support" in err_msg - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_partial_schema(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - df_data = collections.OrderedDict( - [ - ("int_col", [1, 2, 3]), - ("int_as_float_col", [1.0, float("nan"), 3.0]), - ("float_col", [1.0, 2.0, 3.0]), - ("bool_col", [True, False, True]), - ( - "dt_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ), - ), - ( - "ts_col", - pandas.Series( - [ - datetime.datetime(2010, 1, 2, 3, 44, 50), - datetime.datetime(2011, 2, 3, 14, 50, 59), - datetime.datetime(2012, 3, 14, 15, 16), - ], - dtype="datetime64[ns]", - ).dt.tz_localize(pytz.utc), - ), - ("string_col", [u"abc", None, u"def"]), - ("bytes_col", [b"abc", b"def", None]), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - schema = ( - SchemaField("int_as_float_col", "INTEGER"), - SchemaField("string_col", "STRING"), - SchemaField("bytes_col", "BYTES"), - ) - job_config = job.LoadJobConfig(schema=schema) - with load_patch as load_table_from_file: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - assert tuple(sent_config.schema) == ( - SchemaField("int_col", "INTEGER"), - SchemaField("int_as_float_col", "INTEGER"), - SchemaField("float_col", "FLOAT"), - SchemaField("bool_col", "BOOLEAN"), - SchemaField("dt_col", "TIMESTAMP"), - SchemaField("ts_col", "TIMESTAMP"), - SchemaField("string_col", "STRING"), - SchemaField("bytes_col", "BYTES"), - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def 
test_load_table_from_dataframe_w_partial_schema_extra_types(self): - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - df_data = collections.OrderedDict( - [ - ("int_col", [1, 2, 3]), - ("int_as_float_col", [1.0, float("nan"), 3.0]), - ("string_col", [u"abc", None, u"def"]), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - schema = ( - SchemaField("int_as_float_col", "INTEGER"), - SchemaField("string_col", "STRING"), - SchemaField("unknown_col", "BYTES"), - ) - job_config = job.LoadJobConfig(schema=schema) - with load_patch as load_table_from_file, pytest.raises( - ValueError - ) as exc_context: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - load_table_from_file.assert_not_called() - message = str(exc_context.value) - assert "bq_schema contains fields not present in dataframe" in message - assert "unknown_col" in message - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(fastparquet is None, "Requires `fastparquet`") - def test_load_table_from_dataframe_w_partial_schema_missing_types(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - df_data = collections.OrderedDict( - [ - ("string_col", [u"abc", u"def", u"ghi"]), - ("unknown_col", [b"jkl", None, b"mno"]), - ] - ) - dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - pyarrow_patch = mock.patch( - "google.cloud.bigquery._pandas_helpers.pyarrow", None - ) - - schema = (SchemaField("string_col", "STRING"),) - job_config = job.LoadJobConfig(schema=schema) - with pyarrow_patch, load_patch as load_table_from_file, warnings.catch_warnings( - record=True - ) as warned: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - assert warned # there should be at least one warning - unknown_col_warnings = [ - warning for warning in warned if "unknown_col" in str(warning) - ] - assert unknown_col_warnings - assert unknown_col_warnings[0].category == UserWarning - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - assert sent_config.schema is None - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_schema_wo_pyarrow(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - records = [{"name": u"Monty", "age": 100}, {"name": u"Python", "age": 60}] - dataframe = pandas.DataFrame(records, columns=["name", "age"]) - schema = (SchemaField("name", "STRING"), SchemaField("age", "INTEGER")) - job_config = job.LoadJobConfig(schema=schema) - - load_patch = 
mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - pyarrow_patch = mock.patch("google.cloud.bigquery.client.pyarrow", None) - - with load_patch as load_table_from_file, pyarrow_patch, warnings.catch_warnings( - record=True - ) as warned: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - assert warned # there should be at least one warning - for warning in warned: - assert "pyarrow" in str(warning) - assert warning.category in (DeprecationWarning, PendingDeprecationWarning) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.PARQUET - assert tuple(sent_config.schema) == schema - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): - from google.cloud.bigquery import job - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - records = [{"name": u"Monty", "age": 100}, {"name": u"Python", "age": 60}] - dataframe = pandas.DataFrame(records) - schema = (SchemaField("name", "STRING"), SchemaField("age", "INTEGER")) - job_config = job.LoadJobConfig(schema=schema) - - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - to_parquet_patch = mock.patch( - "google.cloud.bigquery.client._pandas_helpers.dataframe_to_parquet", - autospec=True, - ) - - with load_patch, to_parquet_patch as fake_to_parquet: - client.load_table_from_dataframe( - dataframe, - self.TABLE_REF, - job_config=job_config, - location=self.LOCATION, - parquet_compression="LZ4", - ) - - call_args = fake_to_parquet.call_args - assert call_args is not None - assert call_args.kwargs.get("parquet_compression") == "LZ4" - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_wo_pyarrow_custom_compression(self): - client = self._make_client() - records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] - dataframe = pandas.DataFrame(records) - - get_table_patch = mock.patch( - "google.cloud.bigquery.client.Client.get_table", - autospec=True, - side_effect=google.api_core.exceptions.NotFound("Table not found"), - ) - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - pyarrow_patch = mock.patch("google.cloud.bigquery.client.pyarrow", None) - to_parquet_patch = mock.patch.object( - dataframe, "to_parquet", wraps=dataframe.to_parquet - ) - - with load_patch, get_table_patch, pyarrow_patch, to_parquet_patch as to_parquet_spy: - client.load_table_from_dataframe( - dataframe, - self.TABLE_REF, - location=self.LOCATION, - parquet_compression="gzip", - ) - - call_args = to_parquet_spy.call_args - assert call_args is not None - assert call_args.kwargs.get("compression") == "gzip" - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_w_nulls(self): - """Test that a DataFrame with null columns can be uploaded if a - BigQuery schema is specified. 
- - See: https://github.com/googleapis/google-cloud-python/issues/7370 - """ - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - - client = self._make_client() - records = [{"name": None, "age": None}, {"name": None, "age": None}] - dataframe = pandas.DataFrame(records, columns=["name", "age"]) - schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] - job_config = job.LoadJobConfig(schema=schema) - - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - with load_patch as load_table_from_file: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, - job_id=mock.ANY, - job_id_prefix=None, - location=self.LOCATION, - project=None, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.schema == schema - assert sent_config.source_format == job.SourceFormat.PARQUET - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_load_table_from_dataframe_w_invalid_job_config(self): - from google.cloud.bigquery import job - - client = self._make_client() - - records = [{"float_column": 3.14, "struct_column": [{"foo": 1}, {"bar": -1}]}] - dataframe = pandas.DataFrame(data=records) - job_config = job.CopyJobConfig() - - with pytest.raises(TypeError) as exc: - client.load_table_from_dataframe( - dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION - ) - - err_msg = str(exc.value) - assert "Expected an instance of LoadJobConfig" in err_msg - - def test_load_table_from_json_basic_use(self): - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery import job - - client = self._make_client() - - json_rows = [ - {"name": "One", "age": 11, "birthday": "2008-09-10", "adult": False}, - {"name": "Two", "age": 22, "birthday": "1997-08-09", "adult": True}, - ] - - load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - with load_patch as load_table_from_file: - client.load_table_from_json(json_rows, self.TABLE_REF) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=mock.ANY, - job_id_prefix=None, - location=client.location, - project=client.project, - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.NEWLINE_DELIMITED_JSON - assert sent_config.schema is None - assert sent_config.autodetect - - def test_load_table_from_json_non_default_args(self): - from google.cloud.bigquery import job - from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES - from google.cloud.bigquery.schema import SchemaField - - client = self._make_client() - - json_rows = [ - {"name": "One", "age": 11, "birthday": "2008-09-10", "adult": False}, - {"name": "Two", "age": 22, "birthday": "1997-08-09", "adult": True}, - ] - - schema = [ - SchemaField("name", "STRING"), - SchemaField("age", "INTEGER"), - SchemaField("adult", "BOOLEAN"), - ] - job_config = job.LoadJobConfig(schema=schema) - job_config._properties["load"]["unknown_field"] = "foobar" - original_config_copy = copy.deepcopy(job_config) -
load_patch = mock.patch( - "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True - ) - - with load_patch as load_table_from_file: - client.load_table_from_json( - json_rows, - self.TABLE_REF, - job_config=job_config, - project="project-x", - location="EU", - ) - - load_table_from_file.assert_called_once_with( - client, - mock.ANY, - self.TABLE_REF, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=mock.ANY, - job_id_prefix=None, - location="EU", - project="project-x", - job_config=mock.ANY, - ) - - sent_config = load_table_from_file.mock_calls[0][2]["job_config"] - assert sent_config.source_format == job.SourceFormat.NEWLINE_DELIMITED_JSON - assert sent_config.schema == schema - assert not sent_config.autodetect - # all properties should have been cloned and sent to the backend - assert sent_config._properties.get("load", {}).get("unknown_field") == "foobar" - - # the original config object should not have been modified - assert job_config.to_api_repr() == original_config_copy.to_api_repr() - - def test_load_table_from_json_w_invalid_job_config(self): - from google.cloud.bigquery import job - - client = self._make_client() - json_rows = [ - {"name": "One", "age": 11, "birthday": "2008-09-10", "adult": False}, - {"name": "Two", "age": 22, "birthday": "1997-08-09", "adult": True}, - ] - job_config = job.CopyJobConfig() - with pytest.raises(TypeError) as exc: - client.load_table_from_json( - json_rows, - self.TABLE_REF, - job_config=job_config, - project="project-x", - location="EU", - ) - err_msg = str(exc.value) - assert "Expected an instance of LoadJobConfig" in err_msg - - # Low-level tests - - @classmethod - def _make_resumable_upload_responses(cls, size): - """Make a series of responses for a successful resumable upload.""" - from google import resumable_media - - resumable_url = "http://test.invalid?upload_id=and-then-there-was-1" - initial_response = cls._make_response( - http_client.OK, "", {"location": resumable_url} - ) - data_response = cls._make_response( - resumable_media.PERMANENT_REDIRECT, - "", - {"range": "bytes=0-{:d}".format(size - 1)}, - ) - final_response = cls._make_response( - http_client.OK, - json.dumps({"size": size}), - {"Content-Type": "application/json"}, - ) - return [initial_response, data_response, final_response] - - @staticmethod - def _make_transport(responses=None): - import google.auth.transport.requests - - transport = mock.create_autospec( - google.auth.transport.requests.AuthorizedSession, instance=True - ) - transport.request.side_effect = responses - return transport - - def test__do_resumable_upload(self): - file_obj = self._make_file_obj() - file_obj_len = len(file_obj.getvalue()) - transport = self._make_transport( - self._make_resumable_upload_responses(file_obj_len) - ) - client = self._make_client(transport) - - result = client._do_resumable_upload( - file_obj, self.EXPECTED_CONFIGURATION, None - ) - - content = result.content.decode("utf-8") - assert json.loads(content) == {"size": file_obj_len} - - # Verify that configuration data was passed in with the initial - # request. 
- transport.request.assert_any_call( - "POST", - mock.ANY, - data=json.dumps(self.EXPECTED_CONFIGURATION).encode("utf-8"), - headers=mock.ANY, - timeout=mock.ANY, - ) - - def test__do_multipart_upload(self): - transport = self._make_transport([self._make_response(http_client.OK)]) - client = self._make_client(transport) - file_obj = self._make_file_obj() - file_obj_len = len(file_obj.getvalue()) - - client._do_multipart_upload( - file_obj, self.EXPECTED_CONFIGURATION, file_obj_len, None - ) - - # Verify that configuration data was passed in with the initial - # request. - request_args = transport.request.mock_calls[0][2] - request_data = request_args["data"].decode("utf-8") - request_headers = request_args["headers"] - - request_content = email.message_from_string( - "Content-Type: {}\r\n{}".format( - request_headers["content-type"].decode("utf-8"), request_data - ) - ) - - # There should be two payloads: the configuration and the binary data. - configuration_data = request_content.get_payload(0).get_payload() - binary_data = request_content.get_payload(1).get_payload() - - assert json.loads(configuration_data) == self.EXPECTED_CONFIGURATION - assert binary_data.encode("utf-8") == file_obj.getvalue() - - def test__do_multipart_upload_wrong_size(self): - client = self._make_client() - file_obj = self._make_file_obj() - file_obj_len = len(file_obj.getvalue()) - - with pytest.raises(ValueError): - client._do_multipart_upload(file_obj, {}, file_obj_len + 1, None) - - def test_schema_from_json_with_file_path(self): - from google.cloud.bigquery.schema import SchemaField - - file_content = """[ - { - "description": "quarter", - "mode": "REQUIRED", - "name": "qtr", - "type": "STRING" - }, - { - "description": "sales representative", - "mode": "NULLABLE", - "name": "rep", - "type": "STRING" - }, - { - "description": "total sales", - "mode": "NULLABLE", - "name": "sales", - "type": "FLOAT" - } - ]""" - - expected = [ - SchemaField("qtr", "STRING", "REQUIRED", "quarter"), - SchemaField("rep", "STRING", "NULLABLE", "sales representative"), - SchemaField("sales", "FLOAT", "NULLABLE", "total sales"), - ] - - client = self._make_client() - mock_file_path = "/mocked/file.json" - - if six.PY2: - open_patch = mock.patch( - "__builtin__.open", mock.mock_open(read_data=file_content) - ) - else: - open_patch = mock.patch( - "builtins.open", new=mock.mock_open(read_data=file_content) - ) - - with open_patch as _mock_file: - actual = client.schema_from_json(mock_file_path) - _mock_file.assert_called_once_with(mock_file_path) - # This assert is to make sure __exit__ is called in the context - # manager that opens the file in the function - _mock_file().__exit__.assert_called_once() - - assert expected == actual - - def test_schema_from_json_with_file_object(self): - from google.cloud.bigquery.schema import SchemaField - - file_content = """[ - { - "description": "quarter", - "mode": "REQUIRED", - "name": "qtr", - "type": "STRING" - }, - { - "description": "sales representative", - "mode": "NULLABLE", - "name": "rep", - "type": "STRING" - }, - { - "description": "total sales", - "mode": "NULLABLE", - "name": "sales", - "type": "FLOAT" - } - ]""" - - expected = [ - SchemaField("qtr", "STRING", "REQUIRED", "quarter"), - SchemaField("rep", "STRING", "NULLABLE", "sales representative"), - SchemaField("sales", "FLOAT", "NULLABLE", "total sales"), - ] - - client = self._make_client() - - if six.PY2: - fake_file = io.BytesIO(file_content) - else: - fake_file = io.StringIO(file_content) - - actual =
client.schema_from_json(fake_file) - - assert expected == actual - - def test_schema_to_json_with_file_path(self): - from google.cloud.bigquery.schema import SchemaField - - file_content = [ - { - "description": "quarter", - "mode": "REQUIRED", - "name": "qtr", - "type": "STRING", - }, - { - "description": "sales representative", - "mode": "NULLABLE", - "name": "rep", - "type": "STRING", - }, - { - "description": "total sales", - "mode": "NULLABLE", - "name": "sales", - "type": "FLOAT", - }, - ] - - schema_list = [ - SchemaField("qtr", "STRING", "REQUIRED", "quarter"), - SchemaField("rep", "STRING", "NULLABLE", "sales representative"), - SchemaField("sales", "FLOAT", "NULLABLE", "total sales"), - ] - - client = self._make_client() - mock_file_path = "/mocked/file.json" - - if six.PY2: - open_patch = mock.patch("__builtin__.open", mock.mock_open()) - else: - open_patch = mock.patch("builtins.open", mock.mock_open()) - - with open_patch as mock_file, mock.patch("json.dump") as mock_dump: - client.schema_to_json(schema_list, mock_file_path) - mock_file.assert_called_once_with(mock_file_path, mode="w") - # This assert is to make sure __exit__ is called in the context - # manager that opens the file in the function - mock_file().__exit__.assert_called_once() - mock_dump.assert_called_with( - file_content, mock_file.return_value, indent=2, sort_keys=True - ) - - def test_schema_to_json_with_file_object(self): - from google.cloud.bigquery.schema import SchemaField - - file_content = [ - { - "description": "quarter", - "mode": "REQUIRED", - "name": "qtr", - "type": "STRING", - }, - { - "description": "sales representative", - "mode": "NULLABLE", - "name": "rep", - "type": "STRING", - }, - { - "description": "total sales", - "mode": "NULLABLE", - "name": "sales", - "type": "FLOAT", - }, - ] - - schema_list = [ - SchemaField("qtr", "STRING", "REQUIRED", "quarter"), - SchemaField("rep", "STRING", "NULLABLE", "sales representative"), - SchemaField("sales", "FLOAT", "NULLABLE", "total sales"), - ] - - if six.PY2: - fake_file = io.BytesIO() - else: - fake_file = io.StringIO() - - client = self._make_client() - - client.schema_to_json(schema_list, fake_file) - assert file_content == json.loads(fake_file.getvalue()) diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py deleted file mode 100644 index ac13e00932ba..000000000000 --- a/bigquery/tests/unit/test_dataset.py +++ /dev/null @@ -1,704 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
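The deleted test_dataset.py below exercises AccessEntry, DatasetReference, and Dataset. As a quick orientation before the tests, a minimal sketch of the API surface they assert (entity values are illustrative placeholders taken from the tests themselves):

from google.cloud.bigquery.dataset import AccessEntry, DatasetReference

# AccessEntry maps (role, entity_type, entity_id) to its REST representation.
entry = AccessEntry("OWNER", "userByEmail", "phred@example.com")
assert entry.to_api_repr() == {"role": "OWNER", "userByEmail": "phred@example.com"}

# DatasetReference parses "project.dataset" strings and mints child references.
ds_ref = DatasetReference.from_string("string-project.string_dataset")
table_ref = ds_ref.table("table_1")  # a TableReference in this dataset
model_ref = ds_ref.model("model_1")  # a ModelReference in this dataset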
- -import unittest - -import mock -import pytest - - -class TestAccessEntry(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.dataset import AccessEntry - - return AccessEntry - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - entry = self._make_one("OWNER", "userByEmail", "phred@example.com") - self.assertEqual(entry.role, "OWNER") - self.assertEqual(entry.entity_type, "userByEmail") - self.assertEqual(entry.entity_id, "phred@example.com") - - def test_ctor_bad_entity_type(self): - with self.assertRaises(ValueError): - self._make_one(None, "unknown", None) - - def test_ctor_view_with_role(self): - role = "READER" - entity_type = "view" - with self.assertRaises(ValueError): - self._make_one(role, entity_type, None) - - def test_ctor_view_success(self): - role = None - entity_type = "view" - entity_id = object() - entry = self._make_one(role, entity_type, entity_id) - self.assertEqual(entry.role, role) - self.assertEqual(entry.entity_type, entity_type) - self.assertEqual(entry.entity_id, entity_id) - - def test_ctor_nonview_without_role(self): - role = None - entity_type = "userByEmail" - with self.assertRaises(ValueError): - self._make_one(role, entity_type, None) - - def test___eq___role_mismatch(self): - entry = self._make_one("OWNER", "userByEmail", "phred@example.com") - other = self._make_one("WRITER", "userByEmail", "phred@example.com") - self.assertNotEqual(entry, other) - - def test___eq___entity_type_mismatch(self): - entry = self._make_one("OWNER", "userByEmail", "phred@example.com") - other = self._make_one("OWNER", "groupByEmail", "phred@example.com") - self.assertNotEqual(entry, other) - - def test___eq___entity_id_mismatch(self): - entry = self._make_one("OWNER", "userByEmail", "phred@example.com") - other = self._make_one("OWNER", "userByEmail", "bharney@example.com") - self.assertNotEqual(entry, other) - - def test___eq___hit(self): - entry = self._make_one("OWNER", "userByEmail", "phred@example.com") - other = self._make_one("OWNER", "userByEmail", "phred@example.com") - self.assertEqual(entry, other) - - def test__eq___type_mismatch(self): - entry = self._make_one("OWNER", "userByEmail", "silly@example.com") - self.assertNotEqual(entry, object()) - self.assertEqual(entry, mock.ANY) - - def test_to_api_repr(self): - entry = self._make_one("OWNER", "userByEmail", "salmon@example.com") - resource = entry.to_api_repr() - exp_resource = {"role": "OWNER", "userByEmail": "salmon@example.com"} - self.assertEqual(resource, exp_resource) - - def test_to_api_repr_view(self): - view = { - "projectId": "my-project", - "datasetId": "my_dataset", - "tableId": "my_table", - } - entry = self._make_one(None, "view", view) - resource = entry.to_api_repr() - exp_resource = {"view": view} - self.assertEqual(resource, exp_resource) - - def test_from_api_repr(self): - resource = {"role": "OWNER", "userByEmail": "salmon@example.com"} - entry = self._get_target_class().from_api_repr(resource) - self.assertEqual(entry.role, "OWNER") - self.assertEqual(entry.entity_type, "userByEmail") - self.assertEqual(entry.entity_id, "salmon@example.com") - - def test_from_api_repr_w_unknown_entity_type(self): - resource = {"role": "READER", "unknown": "UNKNOWN"} - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(resource) - - def test_from_api_repr_entries_w_extra_keys(self): - resource = { - "role": "READER", - "specialGroup": "projectReaders", - "userByEmail": 
"salmon@example.com", - } - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(resource) - - -class TestDatasetReference(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.dataset import DatasetReference - - return DatasetReference - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - dataset_ref = self._make_one("some-project-1", "dataset_1") - self.assertEqual(dataset_ref.project, "some-project-1") - self.assertEqual(dataset_ref.dataset_id, "dataset_1") - - def test_ctor_bad_args(self): - with self.assertRaises(ValueError): - self._make_one(1, "d") - with self.assertRaises(ValueError): - self._make_one("p", 2) - - def test_table(self): - dataset_ref = self._make_one("some-project-1", "dataset_1") - table_ref = dataset_ref.table("table_1") - self.assertEqual(table_ref.dataset_id, "dataset_1") - self.assertEqual(table_ref.project, "some-project-1") - self.assertEqual(table_ref.table_id, "table_1") - - def test_model(self): - dataset_ref = self._make_one("some-project-1", "dataset_1") - model_ref = dataset_ref.model("model_1") - self.assertEqual(model_ref.project, "some-project-1") - self.assertEqual(model_ref.dataset_id, "dataset_1") - self.assertEqual(model_ref.model_id, "model_1") - - def test_routine(self): - dataset_ref = self._make_one("some-project-1", "dataset_1") - routine_ref = dataset_ref.routine("routine_1") - self.assertEqual(routine_ref.project, "some-project-1") - self.assertEqual(routine_ref.dataset_id, "dataset_1") - self.assertEqual(routine_ref.routine_id, "routine_1") - - def test_to_api_repr(self): - dataset = self._make_one("project_1", "dataset_1") - - resource = dataset.to_api_repr() - - self.assertEqual(resource, {"projectId": "project_1", "datasetId": "dataset_1"}) - - def test_from_api_repr(self): - cls = self._get_target_class() - expected = self._make_one("project_1", "dataset_1") - - got = cls.from_api_repr({"projectId": "project_1", "datasetId": "dataset_1"}) - - self.assertEqual(expected, got) - - def test_from_string(self): - cls = self._get_target_class() - got = cls.from_string("string-project.string_dataset") - self.assertEqual(got.project, "string-project") - self.assertEqual(got.dataset_id, "string_dataset") - - def test_from_string_w_prefix(self): - cls = self._get_target_class() - got = cls.from_string("google.com:string-project.string_dataset") - self.assertEqual(got.project, "google.com:string-project") - self.assertEqual(got.dataset_id, "string_dataset") - - def test_from_string_legacy_string(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string-project:string_dataset") - - def test_from_string_w_incorrect_prefix(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("google.com.string-project.dataset_id") - - def test_from_string_w_prefix_and_too_many_parts(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("google.com:string-project.dataset_id.table_id") - - def test_from_string_not_fully_qualified(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string_dataset") - with self.assertRaises(ValueError): - cls.from_string("a.b.c") - - def test_from_string_with_default_project(self): - cls = self._get_target_class() - got = cls.from_string("string_dataset", default_project="default-project") - self.assertEqual(got.project, 
"default-project") - self.assertEqual(got.dataset_id, "string_dataset") - - def test_from_string_ignores_default_project(self): - cls = self._get_target_class() - got = cls.from_string( - "string-project.string_dataset", default_project="default-project" - ) - self.assertEqual(got.project, "string-project") - self.assertEqual(got.dataset_id, "string_dataset") - - def test___eq___wrong_type(self): - dataset = self._make_one("project_1", "dataset_1") - other = object() - self.assertNotEqual(dataset, other) - self.assertEqual(dataset, mock.ANY) - - def test___eq___project_mismatch(self): - dataset = self._make_one("project_1", "dataset_1") - other = self._make_one("project_2", "dataset_1") - self.assertNotEqual(dataset, other) - - def test___eq___dataset_mismatch(self): - dataset = self._make_one("project_1", "dataset_1") - other = self._make_one("project_1", "dataset_2") - self.assertNotEqual(dataset, other) - - def test___eq___equality(self): - dataset = self._make_one("project_1", "dataset_1") - other = self._make_one("project_1", "dataset_1") - self.assertEqual(dataset, other) - - def test___hash__set_equality(self): - dataset1 = self._make_one("project_1", "dataset_1") - dataset2 = self._make_one("project_1", "dataset_2") - set_one = {dataset1, dataset2} - set_two = {dataset1, dataset2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - dataset1 = self._make_one("project_1", "dataset_1") - dataset2 = self._make_one("project_1", "dataset_2") - set_one = {dataset1} - set_two = {dataset2} - self.assertNotEqual(set_one, set_two) - - def test___repr__(self): - dataset = self._make_one("project1", "dataset1") - expected = "DatasetReference('project1', 'dataset1')" - self.assertEqual(repr(dataset), expected) - - -class TestDataset(unittest.TestCase): - from google.cloud.bigquery.dataset import DatasetReference - - PROJECT = "project" - DS_ID = "dataset-id" - DS_REF = DatasetReference(PROJECT, DS_ID) - KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.dataset import Dataset - - return Dataset - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _setUpConstants(self): - import datetime - from google.cloud._helpers import UTC - - self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) - self.ETAG = "ETAG" - self.DS_FULL_ID = "%s:%s" % (self.PROJECT, self.DS_ID) - self.RESOURCE_URL = "http://example.com/path/to/resource" - - def _make_resource(self): - self._setUpConstants() - USER_EMAIL = "phred@example.com" - GROUP_EMAIL = "group-name@lists.example.com" - return { - "creationTime": self.WHEN_TS * 1000, - "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, - "etag": self.ETAG, - "id": self.DS_FULL_ID, - "lastModifiedTime": self.WHEN_TS * 1000, - "location": "US", - "selfLink": self.RESOURCE_URL, - "defaultTableExpirationMs": 3600, - "access": [ - {"role": "OWNER", "userByEmail": USER_EMAIL}, - {"role": "OWNER", "groupByEmail": GROUP_EMAIL}, - {"role": "WRITER", "specialGroup": "projectWriters"}, - {"role": "READER", "specialGroup": "projectReaders"}, - ], - "defaultEncryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, - } - - def _verify_access_entry(self, access_entries, resource): - r_entries = [] - for r_entry in resource["access"]: - role = r_entry.pop("role") - for entity_type, entity_id in sorted(r_entry.items()): - r_entries.append( - {"role": role, 
"entity_type": entity_type, "entity_id": entity_id} - ) - - self.assertEqual(len(access_entries), len(r_entries)) - for a_entry, r_entry in zip(access_entries, r_entries): - self.assertEqual(a_entry.role, r_entry["role"]) - self.assertEqual(a_entry.entity_type, r_entry["entity_type"]) - self.assertEqual(a_entry.entity_id, r_entry["entity_id"]) - - def _verify_readonly_resource_properties(self, dataset, resource): - - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.reference.project, self.PROJECT) - self.assertEqual(dataset.reference.dataset_id, self.DS_ID) - - if "creationTime" in resource: - self.assertEqual(dataset.created, self.WHEN) - else: - self.assertIsNone(dataset.created) - if "etag" in resource: - self.assertEqual(dataset.etag, self.ETAG) - else: - self.assertIsNone(dataset.etag) - if "lastModifiedTime" in resource: - self.assertEqual(dataset.modified, self.WHEN) - else: - self.assertIsNone(dataset.modified) - if "selfLink" in resource: - self.assertEqual(dataset.self_link, self.RESOURCE_URL) - else: - self.assertIsNone(dataset.self_link) - - def _verify_resource_properties(self, dataset, resource): - - self._verify_readonly_resource_properties(dataset, resource) - - if "defaultTableExpirationMs" in resource: - self.assertEqual( - dataset.default_table_expiration_ms, - int(resource.get("defaultTableExpirationMs")), - ) - else: - self.assertIsNone(dataset.default_table_expiration_ms) - self.assertEqual(dataset.description, resource.get("description")) - self.assertEqual(dataset.friendly_name, resource.get("friendlyName")) - self.assertEqual(dataset.location, resource.get("location")) - if "defaultEncryptionConfiguration" in resource: - self.assertEqual( - dataset.default_encryption_configuration.kms_key_name, - resource.get("defaultEncryptionConfiguration")["kmsKeyName"], - ) - else: - self.assertIsNone(dataset.default_encryption_configuration) - - if "access" in resource: - self._verify_access_entry(dataset.access_entries, resource) - else: - self.assertEqual(dataset.access_entries, []) - - def test_ctor_defaults(self): - dataset = self._make_one(self.DS_REF) - self.assertEqual(dataset.dataset_id, self.DS_ID) - self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual( - dataset.path, "/projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) - ) - self.assertEqual(dataset.access_entries, []) - - self.assertIsNone(dataset.created) - self.assertIsNone(dataset.full_dataset_id) - self.assertIsNone(dataset.etag) - self.assertIsNone(dataset.modified) - self.assertIsNone(dataset.self_link) - - self.assertIsNone(dataset.default_table_expiration_ms) - self.assertIsNone(dataset.description) - self.assertIsNone(dataset.friendly_name) - self.assertIsNone(dataset.location) - - def test_ctor_string(self): - dataset = self._make_one("some-project.some_dset") - self.assertEqual(dataset.project, "some-project") - self.assertEqual(dataset.dataset_id, "some_dset") - - def test_ctor_string_wo_project_id(self): - with pytest.raises(ValueError): - # Project ID is missing. 
-            self._make_one("some_dset")
-
-    def test_ctor_explicit(self):
-        from google.cloud.bigquery.dataset import DatasetReference, AccessEntry
-
-        phred = AccessEntry("OWNER", "userByEmail", "phred@example.com")
-        bharney = AccessEntry("OWNER", "userByEmail", "bharney@example.com")
-        entries = [phred, bharney]
-        OTHER_PROJECT = "foo-bar-123"
-        dataset = self._make_one(DatasetReference(OTHER_PROJECT, self.DS_ID))
-        dataset.access_entries = entries
-        self.assertEqual(dataset.dataset_id, self.DS_ID)
-        self.assertEqual(dataset.project, OTHER_PROJECT)
-        self.assertEqual(
-            dataset.path, "/projects/%s/datasets/%s" % (OTHER_PROJECT, self.DS_ID)
-        )
-        self.assertEqual(dataset.access_entries, entries)
-
-        self.assertIsNone(dataset.created)
-        self.assertIsNone(dataset.full_dataset_id)
-        self.assertIsNone(dataset.etag)
-        self.assertIsNone(dataset.modified)
-        self.assertIsNone(dataset.self_link)
-
-        self.assertIsNone(dataset.default_table_expiration_ms)
-        self.assertIsNone(dataset.description)
-        self.assertIsNone(dataset.friendly_name)
-        self.assertIsNone(dataset.location)
-
-    def test_access_entries_setter_non_list(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(TypeError):
-            dataset.access_entries = object()
-
-    def test_access_entries_setter_invalid_field(self):
-        from google.cloud.bigquery.dataset import AccessEntry
-
-        dataset = self._make_one(self.DS_REF)
-        phred = AccessEntry("OWNER", "userByEmail", "phred@example.com")
-        with self.assertRaises(ValueError):
-            dataset.access_entries = [phred, object()]
-
-    def test_access_entries_setter(self):
-        from google.cloud.bigquery.dataset import AccessEntry
-
-        dataset = self._make_one(self.DS_REF)
-        phred = AccessEntry("OWNER", "userByEmail", "phred@example.com")
-        bharney = AccessEntry("OWNER", "userByEmail", "bharney@example.com")
-        dataset.access_entries = [phred, bharney]
-        self.assertEqual(dataset.access_entries, [phred, bharney])
-
-    def test_default_partition_expiration_ms(self):
-        dataset = self._make_one("proj.dset")
-        assert dataset.default_partition_expiration_ms is None
-        dataset.default_partition_expiration_ms = 12345
-        assert dataset.default_partition_expiration_ms == 12345
-        dataset.default_partition_expiration_ms = None
-        assert dataset.default_partition_expiration_ms is None
-
-    def test_default_table_expiration_ms_setter_bad_value(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(ValueError):
-            dataset.default_table_expiration_ms = "bogus"
-
-    def test_default_table_expiration_ms_setter(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.default_table_expiration_ms = 12345
-        self.assertEqual(dataset.default_table_expiration_ms, 12345)
-
-    def test_description_setter_bad_value(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(ValueError):
-            dataset.description = 12345
-
-    def test_description_setter(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.description = "DESCRIPTION"
-        self.assertEqual(dataset.description, "DESCRIPTION")
-
-    def test_friendly_name_setter_bad_value(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(ValueError):
-            dataset.friendly_name = 12345
-
-    def test_friendly_name_setter(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.friendly_name = "FRIENDLY"
-        self.assertEqual(dataset.friendly_name, "FRIENDLY")
-
-    def test_location_setter_bad_value(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(ValueError):
-            dataset.location = 12345
-
-    def test_location_setter(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.location = "LOCATION"
-        self.assertEqual(dataset.location, "LOCATION")
-
-    def test_labels_update_in_place(self):
-        dataset = self._make_one(self.DS_REF)
-        del dataset._properties["labels"]  # don't start w/ existing dict
-        labels = dataset.labels
-        labels["foo"] = "bar"  # update in place
-        self.assertEqual(dataset.labels, {"foo": "bar"})
-
-    def test_labels_setter(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.labels = {"color": "green"}
-        self.assertEqual(dataset.labels, {"color": "green"})
-
-    def test_labels_setter_bad_value(self):
-        dataset = self._make_one(self.DS_REF)
-        with self.assertRaises(ValueError):
-            dataset.labels = None
-
-    def test_labels_getter_missing_value(self):
-        dataset = self._make_one(self.DS_REF)
-        self.assertEqual(dataset.labels, {})
-
-    def test_from_api_repr_missing_identity(self):
-        self._setUpConstants()
-        RESOURCE = {}
-        klass = self._get_target_class()
-        with self.assertRaises(KeyError):
-            klass.from_api_repr(RESOURCE)
-
-    def test_from_api_repr_bare(self):
-        self._setUpConstants()
-        RESOURCE = {
-            "id": "%s:%s" % (self.PROJECT, self.DS_ID),
-            "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID},
-        }
-        klass = self._get_target_class()
-        dataset = klass.from_api_repr(RESOURCE)
-        self._verify_resource_properties(dataset, RESOURCE)
-
-    def test_from_api_repr_w_properties(self):
-        RESOURCE = self._make_resource()
-        klass = self._get_target_class()
-        dataset = klass.from_api_repr(RESOURCE)
-        self._verify_resource_properties(dataset, RESOURCE)
-
-    def test_to_api_repr_w_custom_field(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset._properties["newAlphaProperty"] = "unreleased property"
-        resource = dataset.to_api_repr()
-
-        exp_resource = {
-            "datasetReference": self.DS_REF.to_api_repr(),
-            "labels": {},
-            "newAlphaProperty": "unreleased property",
-        }
-        self.assertEqual(resource, exp_resource)
-
-    def test_default_encryption_configuration_setter(self):
-        from google.cloud.bigquery.encryption_configuration import (
-            EncryptionConfiguration,
-        )
-
-        dataset = self._make_one(self.DS_REF)
-        encryption_configuration = EncryptionConfiguration(
-            kms_key_name=self.KMS_KEY_NAME
-        )
-        dataset.default_encryption_configuration = encryption_configuration
-        self.assertEqual(
-            dataset.default_encryption_configuration.kms_key_name, self.KMS_KEY_NAME
-        )
-        dataset.default_encryption_configuration = None
-        self.assertIsNone(dataset.default_encryption_configuration)
-
-    def test_from_string(self):
-        cls = self._get_target_class()
-        got = cls.from_string("string-project.string_dataset")
-        self.assertEqual(got.project, "string-project")
-        self.assertEqual(got.dataset_id, "string_dataset")
-
-    def test_from_string_legacy_string(self):
-        cls = self._get_target_class()
-        with self.assertRaises(ValueError):
-            cls.from_string("string-project:string_dataset")
-
-    def test__build_resource_w_custom_field(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset._properties["newAlphaProperty"] = "unreleased property"
-        resource = dataset._build_resource(["newAlphaProperty"])
-
-        exp_resource = {"newAlphaProperty": "unreleased property"}
-        self.assertEqual(resource, exp_resource)
-
-    def test__build_resource_w_custom_field_not_in__properties(self):
-        dataset = self._make_one(self.DS_REF)
-        dataset.bad = "value"
-        with self.assertRaises(ValueError):
-            dataset._build_resource(["bad"])
-
-    def test_table(self):
-        from google.cloud.bigquery.table import TableReference
-
-        dataset = self._make_one(self.DS_REF)
-        table = dataset.table("table_id")
-        self.assertIsInstance(table, TableReference)
-        self.assertEqual(table.table_id, "table_id")
-        self.assertEqual(table.dataset_id, self.DS_ID)
-        self.assertEqual(table.project, self.PROJECT)
-
-    def test___repr__(self):
-        from google.cloud.bigquery.dataset import DatasetReference
-
-        dataset = self._make_one(DatasetReference("project1", "dataset1"))
-        expected = "Dataset(DatasetReference('project1', 'dataset1'))"
-        self.assertEqual(repr(dataset), expected)
-
-
-class TestDatasetListItem(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery.dataset import DatasetListItem
-
-        return DatasetListItem
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
-        project = "test-project"
-        dataset_id = "test_dataset"
-        resource = {
-            "kind": "bigquery#dataset",
-            "id": "{}:{}".format(project, dataset_id),
-            "datasetReference": {"projectId": project, "datasetId": dataset_id},
-            "friendlyName": "Data of the Test",
-            "labels": {"some-stuff": "this-is-a-label"},
-        }
-
-        dataset = self._make_one(resource)
-        self.assertEqual(dataset.project, project)
-        self.assertEqual(dataset.dataset_id, dataset_id)
-        self.assertEqual(dataset.full_dataset_id, "{}:{}".format(project, dataset_id))
-        self.assertEqual(dataset.reference.project, project)
-        self.assertEqual(dataset.reference.dataset_id, dataset_id)
-        self.assertEqual(dataset.friendly_name, "Data of the Test")
-        self.assertEqual(dataset.labels["some-stuff"], "this-is-a-label")
-
-    def test_ctor_missing_properties(self):
-        resource = {
-            "datasetReference": {"projectId": "testproject", "datasetId": "testdataset"}
-        }
-        dataset = self._make_one(resource)
-        self.assertEqual(dataset.project, "testproject")
-        self.assertEqual(dataset.dataset_id, "testdataset")
-        self.assertIsNone(dataset.full_dataset_id)
-        self.assertIsNone(dataset.friendly_name)
-        self.assertEqual(dataset.labels, {})
-
-    def test_ctor_wo_project(self):
-        resource = {"datasetReference": {"datasetId": "testdataset"}}
-        with self.assertRaises(ValueError):
-            self._make_one(resource)
-
-    def test_ctor_wo_dataset(self):
-        resource = {"datasetReference": {"projectId": "testproject"}}
-        with self.assertRaises(ValueError):
-            self._make_one(resource)
-
-    def test_ctor_wo_reference(self):
-        with self.assertRaises(ValueError):
-            self._make_one({})
-
-    def test_labels_update_in_place(self):
-        resource = {
-            "datasetReference": {"projectId": "testproject", "datasetId": "testdataset"}
-        }
-        dataset = self._make_one(resource)
-        labels = dataset.labels
-        labels["foo"] = "bar"  # update in place
-        self.assertEqual(dataset.labels, {"foo": "bar"})
-
-    def test_table(self):
-        from google.cloud.bigquery.table import TableReference
-
-        project = "test-project"
-        dataset_id = "test_dataset"
-        resource = {"datasetReference": {"projectId": project, "datasetId": dataset_id}}
-        dataset = self._make_one(resource)
-        table = dataset.table("table_id")
-        self.assertIsInstance(table, TableReference)
-        self.assertEqual(table.table_id, "table_id")
-        self.assertEqual(table.dataset_id, dataset_id)
-        self.assertEqual(table.project, project)
diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py
deleted file mode 100644
index 45c690ede363..000000000000
--- a/bigquery/tests/unit/test_dbapi__helpers.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import decimal
-import math
-import unittest
-
-import google.cloud._helpers
-from google.cloud.bigquery.dbapi import _helpers
-from google.cloud.bigquery.dbapi import exceptions
-
-
-class TestQueryParameters(unittest.TestCase):
-    def test_scalar_to_query_parameter(self):
-        expected_types = [
-            (True, "BOOL"),
-            (False, "BOOL"),
-            (123, "INT64"),
-            (-123456789, "INT64"),
-            (1.25, "FLOAT64"),
-            (decimal.Decimal("1.25"), "NUMERIC"),
-            (b"I am some bytes", "BYTES"),
-            (u"I am a string", "STRING"),
-            (datetime.date(2017, 4, 1), "DATE"),
-            (datetime.time(12, 34, 56), "TIME"),
-            (datetime.datetime(2012, 3, 4, 5, 6, 7), "DATETIME"),
-            (
-                datetime.datetime(
-                    2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC
-                ),
-                "TIMESTAMP",
-            ),
-        ]
-        for value, expected_type in expected_types:
-            msg = "value: {} expected_type: {}".format(value, expected_type)
-            parameter = _helpers.scalar_to_query_parameter(value)
-            self.assertIsNone(parameter.name, msg=msg)
-            self.assertEqual(parameter.type_, expected_type, msg=msg)
-            self.assertEqual(parameter.value, value, msg=msg)
-            named_parameter = _helpers.scalar_to_query_parameter(value, name="myvar")
-            self.assertEqual(named_parameter.name, "myvar", msg=msg)
-            self.assertEqual(named_parameter.type_, expected_type, msg=msg)
-            self.assertEqual(named_parameter.value, value, msg=msg)
-
-    def test_scalar_to_query_parameter_w_unexpected_type(self):
-        with self.assertRaises(exceptions.ProgrammingError):
-            _helpers.scalar_to_query_parameter(value={"a": "dictionary"})
-
-    def test_scalar_to_query_parameter_w_special_floats(self):
-        nan_parameter = _helpers.scalar_to_query_parameter(float("nan"))
-        self.assertTrue(math.isnan(nan_parameter.value))
-        self.assertEqual(nan_parameter.type_, "FLOAT64")
-        inf_parameter = _helpers.scalar_to_query_parameter(float("inf"))
-        self.assertTrue(math.isinf(inf_parameter.value))
-        self.assertEqual(inf_parameter.type_, "FLOAT64")
-
-    def test_array_to_query_parameter_valid_argument(self):
-        expected_types = [
-            ([True, False], "BOOL"),
-            ([123, -456, 0], "INT64"),
-            ([1.25, 2.50], "FLOAT64"),
-            ([decimal.Decimal("1.25")], "NUMERIC"),
-            ([b"foo", b"bar"], "BYTES"),
-            ([u"foo", u"bar"], "STRING"),
-            ([datetime.date(2017, 4, 1), datetime.date(2018, 4, 1)], "DATE"),
-            ([datetime.time(12, 34, 56), datetime.time(10, 20, 30)], "TIME"),
-            (
-                [
-                    datetime.datetime(2012, 3, 4, 5, 6, 7),
-                    datetime.datetime(2013, 1, 1, 10, 20, 30),
-                ],
-                "DATETIME",
-            ),
-            (
-                [
-                    datetime.datetime(
-                        2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC
-                    ),
-                    datetime.datetime(
-                        2013, 1, 1, 10, 20, 30, tzinfo=google.cloud._helpers.UTC
-                    ),
-                ],
-                "TIMESTAMP",
-            ),
-        ]
-
-        for values, expected_type in expected_types:
-            msg = "value: {} expected_type: {}".format(values, expected_type)
-            parameter = _helpers.array_to_query_parameter(values)
-            self.assertIsNone(parameter.name, msg=msg)
-            self.assertEqual(parameter.array_type, expected_type, msg=msg)
-            self.assertEqual(parameter.values, values, msg=msg)
-            named_param = _helpers.array_to_query_parameter(values, name="my_param")
-            self.assertEqual(named_param.name, "my_param", msg=msg)
-            self.assertEqual(named_param.array_type, expected_type, msg=msg)
-            self.assertEqual(named_param.values, values, msg=msg)
-
-    def test_array_to_query_parameter_empty_argument(self):
-        with self.assertRaises(exceptions.ProgrammingError):
-            _helpers.array_to_query_parameter([])
-
-    def test_array_to_query_parameter_unsupported_sequence(self):
-        unsupported_iterables = [{10, 20, 30}, u"foo", b"bar", bytearray([65, 75, 85])]
-        for iterable in unsupported_iterables:
-            with self.assertRaises(exceptions.ProgrammingError):
-                _helpers.array_to_query_parameter(iterable)
-
-    def test_array_to_query_parameter_sequence_w_invalid_elements(self):
-        with self.assertRaises(exceptions.ProgrammingError):
-            _helpers.array_to_query_parameter([object(), 2, 7])
-
-    def test_to_query_parameters_w_dict(self):
-        parameters = {"somebool": True, "somestring": u"a-string-value"}
-        query_parameters = _helpers.to_query_parameters(parameters)
-        query_parameter_tuples = []
-        for param in query_parameters:
-            query_parameter_tuples.append((param.name, param.type_, param.value))
-        self.assertSequenceEqual(
-            sorted(query_parameter_tuples),
-            sorted(
-                [
-                    ("somebool", "BOOL", True),
-                    ("somestring", "STRING", u"a-string-value"),
-                ]
-            ),
-        )
-
-    def test_to_query_parameters_w_dict_array_param(self):
-        parameters = {"somelist": [10, 20]}
-        query_parameters = _helpers.to_query_parameters(parameters)
-
-        self.assertEqual(len(query_parameters), 1)
-        param = query_parameters[0]
-
-        self.assertEqual(param.name, "somelist")
-        self.assertEqual(param.array_type, "INT64")
-        self.assertEqual(param.values, [10, 20])
-
-    def test_to_query_parameters_w_dict_dict_param(self):
-        parameters = {"my_param": {"foo": "bar"}}
-
-        with self.assertRaises(NotImplementedError):
-            _helpers.to_query_parameters(parameters)
-
-    def test_to_query_parameters_w_list(self):
-        parameters = [True, u"a-string-value"]
-        query_parameters = _helpers.to_query_parameters(parameters)
-        query_parameter_tuples = []
-        for param in query_parameters:
-            query_parameter_tuples.append((param.name, param.type_, param.value))
-        self.assertSequenceEqual(
-            sorted(query_parameter_tuples),
-            sorted([(None, "BOOL", True), (None, "STRING", u"a-string-value")]),
-        )
-
-    def test_to_query_parameters_w_list_array_param(self):
-        parameters = [[10, 20]]
-        query_parameters = _helpers.to_query_parameters(parameters)
-
-        self.assertEqual(len(query_parameters), 1)
-        param = query_parameters[0]
-
-        self.assertIsNone(param.name)
-        self.assertEqual(param.array_type, "INT64")
-        self.assertEqual(param.values, [10, 20])
-
-    def test_to_query_parameters_w_list_dict_param(self):
-        parameters = [{"foo": "bar"}]
-
-        with self.assertRaises(NotImplementedError):
-            _helpers.to_query_parameters(parameters)
-
-    def test_to_query_parameters_none_argument(self):
-        query_parameters = _helpers.to_query_parameters(None)
-        self.assertEqual(query_parameters, [])
diff --git a/bigquery/tests/unit/test_dbapi_connection.py b/bigquery/tests/unit/test_dbapi_connection.py
deleted file mode 100644
index 19acec05bd34..000000000000
--- a/bigquery/tests/unit/test_dbapi_connection.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-
-class TestConnection(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery.dbapi import Connection
-
-        return Connection
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def _mock_client(self, rows=None, schema=None):
-        from google.cloud.bigquery import client
-
-        mock_client = mock.create_autospec(client.Client)
-        return mock_client
-
-    def test_ctor(self):
-        from google.cloud.bigquery.dbapi import Connection
-
-        mock_client = self._mock_client()
-        connection = self._make_one(client=mock_client)
-        self.assertIsInstance(connection, Connection)
-        self.assertIs(connection._client, mock_client)
-
-    @mock.patch("google.cloud.bigquery.Client", autospec=True)
-    def test_connect_wo_client(self, mock_client):
-        from google.cloud.bigquery.dbapi import connect
-        from google.cloud.bigquery.dbapi import Connection
-
-        connection = connect()
-        self.assertIsInstance(connection, Connection)
-        self.assertIsNotNone(connection._client)
-
-    def test_connect_w_client(self):
-        from google.cloud.bigquery.dbapi import connect
-        from google.cloud.bigquery.dbapi import Connection
-
-        mock_client = self._mock_client()
-        connection = connect(client=mock_client)
-        self.assertIsInstance(connection, Connection)
-        self.assertIs(connection._client, mock_client)
-
-    def test_close(self):
-        connection = self._make_one(client=self._mock_client())
-        # close() is a no-op, there is nothing to test.
-        connection.close()
-
-    def test_commit(self):
-        connection = self._make_one(client=self._mock_client())
-        # commit() is a no-op, there is nothing to test.
-        connection.commit()
-
-    def test_cursor(self):
-        from google.cloud.bigquery.dbapi import Cursor
-
-        connection = self._make_one(client=self._mock_client())
-        cursor = connection.cursor()
-        self.assertIsInstance(cursor, Cursor)
-        self.assertIs(cursor.connection, connection)
diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py
deleted file mode 100644
index 4ccd5e71af72..000000000000
--- a/bigquery/tests/unit/test_dbapi_cursor.py
+++ /dev/null
@@ -1,332 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-
-class TestCursor(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery.dbapi import Cursor
-
-        return Cursor
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def _mock_client(self, rows=None, schema=None, num_dml_affected_rows=None):
-        from google.cloud.bigquery import client
-
-        if rows is None:
-            total_rows = 0
-        else:
-            total_rows = len(rows)
-
-        mock_client = mock.create_autospec(client.Client)
-        mock_client.query.return_value = self._mock_job(
-            total_rows=total_rows,
-            schema=schema,
-            num_dml_affected_rows=num_dml_affected_rows,
-        )
-        mock_client.list_rows.return_value = rows
-        return mock_client
-
-    def _mock_job(self, total_rows=0, schema=None, num_dml_affected_rows=None):
-        from google.cloud.bigquery import job
-
-        mock_job = mock.create_autospec(job.QueryJob)
-        mock_job.error_result = None
-        mock_job.state = "DONE"
-        mock_job.result.return_value = mock_job
-        mock_job._query_results = self._mock_results(
-            total_rows=total_rows,
-            schema=schema,
-            num_dml_affected_rows=num_dml_affected_rows,
-        )
-
-        if num_dml_affected_rows is None:
-            mock_job.statement_type = None  # API sends back None for SELECT
-        else:
-            mock_job.statement_type = "UPDATE"
-
-        return mock_job
-
-    def _mock_results(self, total_rows=0, schema=None, num_dml_affected_rows=None):
-        from google.cloud.bigquery import query
-
-        mock_results = mock.create_autospec(query._QueryResults)
-        mock_results.schema = schema
-        mock_results.num_dml_affected_rows = num_dml_affected_rows
-        mock_results.total_rows = total_rows
-        return mock_results
-
-    def test_ctor(self):
-        from google.cloud.bigquery.dbapi import connect
-        from google.cloud.bigquery.dbapi import Cursor
-
-        connection = connect(self._mock_client())
-        cursor = self._make_one(connection)
-        self.assertIsInstance(cursor, Cursor)
-        self.assertIs(cursor.connection, connection)
-
-    def test_close(self):
-        from google.cloud.bigquery.dbapi import connect
-
-        connection = connect(self._mock_client())
-        cursor = connection.cursor()
-        # close() is a no-op, there is nothing to test.
-        cursor.close()
-
-    def test_fetchone_wo_execute_raises_error(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client())
-        cursor = connection.cursor()
-        self.assertRaises(dbapi.Error, cursor.fetchone)
-
-    def test_fetchone_w_row(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client(rows=[(1,)]))
-        cursor = connection.cursor()
-        cursor.execute("SELECT 1;")
-        row = cursor.fetchone()
-        self.assertEqual(row, (1,))
-        self.assertIsNone(cursor.fetchone())
-
-    def test_fetchmany_wo_execute_raises_error(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client())
-        cursor = connection.cursor()
-        self.assertRaises(dbapi.Error, cursor.fetchmany)
-
-    def test_fetchmany_w_row(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client(rows=[(1,)]))
-        cursor = connection.cursor()
-        cursor.execute("SELECT 1;")
-        rows = cursor.fetchmany()
-        self.assertEqual(len(rows), 1)
-        self.assertEqual(rows[0], (1,))
-
-    def test_fetchmany_w_size(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(
-            self._mock_client(rows=[(1, 2, 3), (4, 5, 6), (7, 8, 9)])
-        )
-        cursor = connection.cursor()
-        cursor.execute("SELECT a, b, c;")
-        rows = cursor.fetchmany(size=2)
-        self.assertEqual(len(rows), 2)
-        self.assertEqual(rows[0], (1, 2, 3))
-        self.assertEqual(rows[1], (4, 5, 6))
-        second_page = cursor.fetchmany(size=2)
-        self.assertEqual(len(second_page), 1)
-        self.assertEqual(second_page[0], (7, 8, 9))
-        third_page = cursor.fetchmany(size=2)
-        self.assertEqual(third_page, [])
-
-    def test_fetchmany_w_arraysize(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(
-            self._mock_client(rows=[(1, 2, 3), (4, 5, 6), (7, 8, 9)])
-        )
-        cursor = connection.cursor()
-        cursor.execute("SELECT a, b, c;")
-        cursor.arraysize = 2
-        rows = cursor.fetchmany()
-        self.assertEqual(len(rows), 2)
-        self.assertEqual(rows[0], (1, 2, 3))
-        self.assertEqual(rows[1], (4, 5, 6))
-        second_page = cursor.fetchmany()
-        self.assertEqual(len(second_page), 1)
-        self.assertEqual(second_page[0], (7, 8, 9))
-        third_page = cursor.fetchmany()
-        self.assertEqual(third_page, [])
-
-    def test_fetchall_wo_execute_raises_error(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client())
-        cursor = connection.cursor()
-        self.assertRaises(dbapi.Error, cursor.fetchall)
-
-    def test_fetchall_w_row(self):
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(self._mock_client(rows=[(1,)]))
-        cursor = connection.cursor()
-        cursor.execute("SELECT 1;")
-        self.assertIsNone(cursor.description)
-        self.assertEqual(cursor.rowcount, 1)
-        rows = cursor.fetchall()
-        self.assertEqual(len(rows), 1)
-        self.assertEqual(rows[0], (1,))
-
-    def test_execute_custom_job_id(self):
-        from google.cloud.bigquery.dbapi import connect
-
-        client = self._mock_client(rows=[], num_dml_affected_rows=0)
-        connection = connect(client)
-        cursor = connection.cursor()
-        cursor.execute("SELECT 1;", job_id="foo")
-        args, kwargs = client.query.call_args
-        self.assertEqual(args[0], "SELECT 1;")
-        self.assertEqual(kwargs["job_id"], "foo")
-
-    def test_execute_custom_job_config(self):
-        from google.cloud.bigquery.dbapi import connect
-        from google.cloud.bigquery import job
-
-        config = job.QueryJobConfig(use_legacy_sql=True)
-        client = self._mock_client(rows=[], num_dml_affected_rows=0)
-        connection = connect(client)
-        cursor = connection.cursor()
-        cursor.execute("SELECT 1;", job_id="foo", job_config=config)
-        args, kwargs = client.query.call_args
-        self.assertEqual(args[0], "SELECT 1;")
-        self.assertEqual(kwargs["job_id"], "foo")
-        self.assertEqual(kwargs["job_config"], config)
-
-    def test_execute_w_dml(self):
-        from google.cloud.bigquery.dbapi import connect
-
-        connection = connect(self._mock_client(rows=[], num_dml_affected_rows=12))
-        cursor = connection.cursor()
-        cursor.execute("DELETE FROM UserSessions WHERE user_id = 'test';")
-        rows = cursor.fetchall()
-        self.assertIsNone(cursor.description)
-        self.assertEqual(cursor.rowcount, 12)
-        self.assertEqual(rows, [])
-
-    def test_execute_w_query(self):
-        from google.cloud.bigquery.schema import SchemaField
-        from google.cloud.bigquery import dbapi
-
-        connection = dbapi.connect(
-            self._mock_client(
-                rows=[("hello", "world", 1), ("howdy", "y'all", 2)],
-                schema=[
-                    SchemaField("a", "STRING", mode="NULLABLE"),
-                    SchemaField("b", "STRING", mode="REQUIRED"),
-                    SchemaField("c", "INTEGER", mode="NULLABLE"),
-                ],
-            )
-        )
-        cursor = connection.cursor()
-        cursor.execute("SELECT a, b, c FROM hello_world WHERE d > 3;")
-
-        # Verify the description.
-        self.assertEqual(len(cursor.description), 3)
-        a_name, a_type, _, _, _, _, a_null_ok = cursor.description[0]
-        self.assertEqual(a_name, "a")
-        self.assertEqual(a_type, "STRING")
-        self.assertEqual(a_type, dbapi.STRING)
-        self.assertTrue(a_null_ok)
-        b_name, b_type, _, _, _, _, b_null_ok = cursor.description[1]
-        self.assertEqual(b_name, "b")
-        self.assertEqual(b_type, "STRING")
-        self.assertEqual(b_type, dbapi.STRING)
-        self.assertFalse(b_null_ok)
-        c_name, c_type, _, _, _, _, c_null_ok = cursor.description[2]
-        self.assertEqual(c_name, "c")
-        self.assertEqual(c_type, "INTEGER")
-        self.assertEqual(c_type, dbapi.NUMBER)
-        self.assertTrue(c_null_ok)
-
-        # Verify the results.
-        self.assertEqual(cursor.rowcount, 2)
-        row = cursor.fetchone()
-        self.assertEqual(row, ("hello", "world", 1))
-        row = cursor.fetchone()
-        self.assertEqual(row, ("howdy", "y'all", 2))
-        row = cursor.fetchone()
-        self.assertIsNone(row)
-
-    def test_execute_raises_if_result_raises(self):
-        import google.cloud.exceptions
-
-        from google.cloud.bigquery import client
-        from google.cloud.bigquery import job
-        from google.cloud.bigquery.dbapi import connect
-        from google.cloud.bigquery.dbapi import exceptions
-
-        job = mock.create_autospec(job.QueryJob)
-        job.result.side_effect = google.cloud.exceptions.GoogleCloudError("")
-        client = mock.create_autospec(client.Client)
-        client.query.return_value = job
-        connection = connect(client)
-        cursor = connection.cursor()
-
-        with self.assertRaises(exceptions.DatabaseError):
-            cursor.execute("SELECT 1")
-
-    def test_executemany_w_dml(self):
-        from google.cloud.bigquery.dbapi import connect
-
-        connection = connect(self._mock_client(rows=[], num_dml_affected_rows=12))
-        cursor = connection.cursor()
-        cursor.executemany(
-            "DELETE FROM UserSessions WHERE user_id = %s;",
-            (("test",), ("anothertest",)),
-        )
-        self.assertIsNone(cursor.description)
-        self.assertEqual(cursor.rowcount, 12)
-
-    def test__format_operation_w_dict(self):
-        from google.cloud.bigquery.dbapi import cursor
-
-        formatted_operation = cursor._format_operation(
-            "SELECT %(somevalue)s, %(a `weird` one)s;",
-            {"somevalue": "hi", "a `weird` one": "world"},
-        )
-        self.assertEqual(
-            formatted_operation, "SELECT @`somevalue`, @`a \\`weird\\` one`;"
-        )
-
-    def test__format_operation_w_wrong_dict(self):
-        from google.cloud.bigquery import dbapi
-        from google.cloud.bigquery.dbapi import cursor
-
-        self.assertRaises(
-            dbapi.ProgrammingError,
-            cursor._format_operation,
-            "SELECT %(somevalue)s, %(othervalue)s;",
-            {"somevalue-not-here": "hi", "othervalue": "world"},
-        )
-
-    def test__format_operation_w_sequence(self):
-        from google.cloud.bigquery.dbapi import cursor
-
-        formatted_operation = cursor._format_operation(
-            "SELECT %s, %s;", ("hello", "world")
-        )
-        self.assertEqual(formatted_operation, "SELECT ?, ?;")
-
-    def test__format_operation_w_too_short_sequence(self):
-        from google.cloud.bigquery import dbapi
-        from google.cloud.bigquery.dbapi import cursor
-
-        self.assertRaises(
-            dbapi.ProgrammingError,
-            cursor._format_operation,
-            "SELECT %s, %s;",
-            ("hello",),
-        )
diff --git a/bigquery/tests/unit/test_dbapi_types.py b/bigquery/tests/unit/test_dbapi_types.py
deleted file mode 100644
index e05660ffed14..000000000000
--- a/bigquery/tests/unit/test_dbapi_types.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import unittest
-
-import google.cloud._helpers
-from google.cloud.bigquery.dbapi import types
-
-
-class TestTypes(unittest.TestCase):
-    def test_binary_type(self):
-        self.assertEqual("BYTES", types.BINARY)
-        self.assertEqual("RECORD", types.BINARY)
-        self.assertEqual("STRUCT", types.BINARY)
-        self.assertNotEqual("STRING", types.BINARY)
-
-    def test_binary_constructor(self):
-        self.assertEqual(types.Binary(u"hello"), b"hello")
-        self.assertEqual(types.Binary(u"\u1f60"), u"\u1f60".encode("utf-8"))
-
-    def test_timefromticks(self):
-        somedatetime = datetime.datetime(
-            2017, 2, 18, 12, 47, 26, tzinfo=google.cloud._helpers.UTC
-        )
-        epoch = datetime.datetime(1970, 1, 1, tzinfo=google.cloud._helpers.UTC)
-        ticks = (somedatetime - epoch).total_seconds()
-        self.assertEqual(
-            types.TimeFromTicks(ticks, google.cloud._helpers.UTC),
-            datetime.time(12, 47, 26, tzinfo=google.cloud._helpers.UTC),
-        )
diff --git a/bigquery/tests/unit/test_encryption_configuration.py b/bigquery/tests/unit/test_encryption_configuration.py
deleted file mode 100644
index f432a903b4cc..000000000000
--- a/bigquery/tests/unit/test_encryption_configuration.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mock
-
-
-class TestEncryptionConfiguration(unittest.TestCase):
-    KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery.encryption_configuration import (
-            EncryptionConfiguration,
-        )
-
-        return EncryptionConfiguration
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor_defaults(self):
-        encryption_config = self._make_one()
-        self.assertIsNone(encryption_config.kms_key_name)
-
-    def test_ctor_with_key(self):
-        encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME)
-        self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME)
-
-    def test_kms_key_name_setter(self):
-        encryption_config = self._make_one()
-        self.assertIsNone(encryption_config.kms_key_name)
-        encryption_config.kms_key_name = self.KMS_KEY_NAME
-        self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME)
-        encryption_config.kms_key_name = None
-        self.assertIsNone(encryption_config.kms_key_name)
-
-    def test_from_api_repr(self):
-        RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME}
-        klass = self._get_target_class()
-        encryption_config = klass.from_api_repr(RESOURCE)
-        self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME)
-
-    def test_to_api_repr(self):
-        encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME)
-        resource = encryption_config.to_api_repr()
-        self.assertEqual(resource, {"kmsKeyName": self.KMS_KEY_NAME})
-
-    def test___eq___wrong_type(self):
-        encryption_config = self._make_one()
-        other = object()
-        self.assertNotEqual(encryption_config, other)
-        self.assertEqual(encryption_config, mock.ANY)
-
-    def test___eq___kms_key_name_mismatch(self):
-        encryption_config = self._make_one()
-        other = self._make_one(self.KMS_KEY_NAME)
-        self.assertNotEqual(encryption_config, other)
-
-    def test___eq___hit(self):
-        encryption_config = self._make_one(self.KMS_KEY_NAME)
-        other = self._make_one(self.KMS_KEY_NAME)
-        self.assertEqual(encryption_config, other)
-
-    def test___ne___wrong_type(self):
-        encryption_config = self._make_one()
-        other = object()
-        self.assertNotEqual(encryption_config, other)
-        self.assertEqual(encryption_config, mock.ANY)
-
-    def test___ne___same_value(self):
-        encryption_config1 = self._make_one(self.KMS_KEY_NAME)
-        encryption_config2 = self._make_one(self.KMS_KEY_NAME)
-        # unittest ``assertEqual`` uses ``==`` not ``!=``.
-        comparison_val = encryption_config1 != encryption_config2
-        self.assertFalse(comparison_val)
-
-    def test___ne___different_values(self):
-        encryption_config1 = self._make_one()
-        encryption_config2 = self._make_one(self.KMS_KEY_NAME)
-        self.assertNotEqual(encryption_config1, encryption_config2)
-
-    def test___hash__set_equality(self):
-        encryption_config1 = self._make_one(self.KMS_KEY_NAME)
-        encryption_config2 = self._make_one(self.KMS_KEY_NAME)
-        set_one = {encryption_config1, encryption_config2}
-        set_two = {encryption_config1, encryption_config2}
-        self.assertEqual(set_one, set_two)
-
-    def test___hash__not_equals(self):
-        encryption_config1 = self._make_one()
-        encryption_config2 = self._make_one(self.KMS_KEY_NAME)
-        set_one = {encryption_config1}
-        set_two = {encryption_config2}
-        self.assertNotEqual(set_one, set_two)
-
-    def test___repr__(self):
-        encryption_config = self._make_one(self.KMS_KEY_NAME)
-        expected = "EncryptionConfiguration({})".format(self.KMS_KEY_NAME)
-        self.assertEqual(repr(encryption_config), expected)
diff --git a/bigquery/tests/unit/test_external_config.py b/bigquery/tests/unit/test_external_config.py
deleted file mode 100644
index 6028d069bcbe..000000000000
--- a/bigquery/tests/unit/test_external_config.py
+++ /dev/null
@@ -1,427 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import copy
-import unittest
-
-from google.cloud.bigquery import external_config
-from google.cloud.bigquery import schema
-
-
-class TestExternalConfig(unittest.TestCase):
-
-    SOURCE_URIS = ["gs://foo", "gs://bar"]
-
-    BASE_RESOURCE = {
-        "sourceFormat": "",
-        "sourceUris": SOURCE_URIS,
-        "maxBadRecords": 17,
-        "autodetect": True,
-        "ignoreUnknownValues": False,
-        "compression": "compression",
-    }
-
-    def test_from_api_repr_base(self):
-        resource = copy.deepcopy(self.BASE_RESOURCE)
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-        self._verify_base(ec)
-        self.assertEqual(ec.schema, [])
-        self.assertIsNone(ec.options)
-
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, self.BASE_RESOURCE)
-
-        resource = _copy_and_update(
-            self.BASE_RESOURCE,
-            {
-                "schema": {
-                    "fields": [
-                        {
-                            "name": "full_name",
-                            "type": "STRING",
-                            "mode": "REQUIRED",
-                            "description": None,
-                        }
-                    ]
-                }
-            },
-        )
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-        self._verify_base(ec)
-        exp_schema = [schema.SchemaField("full_name", "STRING", mode="REQUIRED")]
-        self.assertEqual(ec.schema, exp_schema)
-        self.assertIsNone(ec.options)
-
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, resource)
-
-    def test_to_api_repr_base(self):
-        ec = external_config.ExternalConfig("")
-        ec.source_uris = self.SOURCE_URIS
-        ec.max_bad_records = 17
-        ec.autodetect = True
-        ec.ignore_unknown_values = False
-        ec.compression = "compression"
-        ec.schema = [schema.SchemaField("full_name", "STRING", mode="REQUIRED")]
-
-        exp_schema = {
-            "fields": [
-                {
-                    "name": "full_name",
-                    "type": "STRING",
-                    "mode": "REQUIRED",
-                    "description": None,
-                }
-            ]
-        }
-        got_resource = ec.to_api_repr()
-        exp_resource = {
-            "sourceFormat": "",
-            "sourceUris": self.SOURCE_URIS,
-            "maxBadRecords": 17,
-            "autodetect": True,
-            "ignoreUnknownValues": False,
-            "compression": "compression",
-            "schema": exp_schema,
-        }
-        self.assertEqual(got_resource, exp_resource)
-
-    def test_schema_None(self):
-        ec = external_config.ExternalConfig("")
-        ec.schema = None
-        got = ec.to_api_repr()
-        want = {"sourceFormat": "", "schema": None}
-        self.assertEqual(got, want)
-
-    def test_schema_empty(self):
-        ec = external_config.ExternalConfig("")
-        ec.schema = []
-        got = ec.to_api_repr()
-        want = {"sourceFormat": "", "schema": {"fields": []}}
-        self.assertEqual(got, want)
-
-    def _verify_base(self, ec):
-        self.assertEqual(ec.autodetect, True)
-        self.assertEqual(ec.compression, "compression")
-        self.assertEqual(ec.ignore_unknown_values, False)
-        self.assertEqual(ec.max_bad_records, 17)
-        self.assertEqual(ec.source_uris, self.SOURCE_URIS)
-
-    def test_to_api_repr_source_format(self):
-        ec = external_config.ExternalConfig("CSV")
-        got = ec.to_api_repr()
-        want = {"sourceFormat": "CSV"}
-        self.assertEqual(got, want)
-
-    def test_from_api_repr_sheets(self):
-        resource = _copy_and_update(
-            self.BASE_RESOURCE,
-            {
-                "sourceFormat": "GOOGLE_SHEETS",
-                "googleSheetsOptions": {
-                    "skipLeadingRows": "123",
-                    "range": "Sheet1!A5:B10",
-                },
-            },
-        )
-
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-
-        self._verify_base(ec)
-        self.assertEqual(ec.source_format, "GOOGLE_SHEETS")
-        self.assertIsInstance(ec.options, external_config.GoogleSheetsOptions)
-        self.assertEqual(ec.options.skip_leading_rows, 123)
-        self.assertEqual(ec.options.range, "Sheet1!A5:B10")
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, resource)
-
-        del resource["googleSheetsOptions"]["skipLeadingRows"]
-        del resource["googleSheetsOptions"]["range"]
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-        self.assertIsNone(ec.options.skip_leading_rows)
-        self.assertIsNone(ec.options.range)
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, resource)
-
-    def test_to_api_repr_sheets(self):
-        ec = external_config.ExternalConfig("GOOGLE_SHEETS")
-        options = external_config.GoogleSheetsOptions()
-        options.skip_leading_rows = 123
-        options.range = "Sheet1!A5:B10"
-        ec._options = options
-
-        exp_resource = {
-            "sourceFormat": "GOOGLE_SHEETS",
-            "googleSheetsOptions": {"skipLeadingRows": "123", "range": "Sheet1!A5:B10"},
-        }
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, exp_resource)
-
-    def test_from_api_repr_hive_partitioning(self):
-        resource = _copy_and_update(
-            self.BASE_RESOURCE,
-            {
-                "sourceFormat": "FORMAT_FOO",
-                "hivePartitioningOptions": {
-                    "sourceUriPrefix": "http://foo/bar",
-                    "mode": "STRINGS",
-                },
-            },
-        )
-
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-
-        self._verify_base(ec)
-        self.assertEqual(ec.source_format, "FORMAT_FOO")
-        self.assertIsInstance(
-            ec.hive_partitioning, external_config.HivePartitioningOptions
-        )
-        self.assertEqual(ec.hive_partitioning.source_uri_prefix, "http://foo/bar")
-        self.assertEqual(ec.hive_partitioning.mode, "STRINGS")
-
-        # converting back to API representation should yield the same result
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, resource)
-
-        del resource["hivePartitioningOptions"]
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-        self.assertIsNone(ec.hive_partitioning)
-
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, resource)
-
-    def test_to_api_repr_hive_partitioning(self):
-        hive_partitioning = external_config.HivePartitioningOptions()
-        hive_partitioning.source_uri_prefix = "http://foo/bar"
-        hive_partitioning.mode = "STRINGS"
-
-        ec = external_config.ExternalConfig("FORMAT_FOO")
-        ec.hive_partitioning = hive_partitioning
-
-        got_resource = ec.to_api_repr()
-
-        expected_resource = {
-            "sourceFormat": "FORMAT_FOO",
-            "hivePartitioningOptions": {
-                "sourceUriPrefix": "http://foo/bar",
-                "mode": "STRINGS",
-            },
-        }
-        self.assertEqual(got_resource, expected_resource)
-
-    def test_from_api_repr_csv(self):
-        resource = _copy_and_update(
-            self.BASE_RESOURCE,
-            {
-                "sourceFormat": "CSV",
-                "csvOptions": {
-                    "fieldDelimiter": "fieldDelimiter",
-                    "skipLeadingRows": "123",
-                    "quote": "quote",
-                    "allowQuotedNewlines": True,
-                    "allowJaggedRows": False,
-                    "encoding": "encoding",
-                },
-            },
-        )
-
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-
-        self._verify_base(ec)
-        self.assertEqual(ec.source_format, "CSV")
-        self.assertIsInstance(ec.options, external_config.CSVOptions)
-        self.assertEqual(ec.options.field_delimiter, "fieldDelimiter")
-        self.assertEqual(ec.options.skip_leading_rows, 123)
-        self.assertEqual(ec.options.quote_character, "quote")
-        self.assertEqual(ec.options.allow_quoted_newlines, True)
-        self.assertEqual(ec.options.allow_jagged_rows, False)
-        self.assertEqual(ec.options.encoding, "encoding")
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, resource)
-
-        del resource["csvOptions"]["skipLeadingRows"]
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-        self.assertIsNone(ec.options.skip_leading_rows)
-        got_resource = ec.to_api_repr()
-        self.assertEqual(got_resource, resource)
-
-    def test_to_api_repr_csv(self):
-        ec = external_config.ExternalConfig("CSV")
-        options = external_config.CSVOptions()
-        options.allow_quoted_newlines = True
-        options.encoding = "encoding"
-        options.field_delimiter = "fieldDelimiter"
-        options.quote_character = "quote"
-        options.skip_leading_rows = 123
-        options.allow_jagged_rows = False
-        ec._options = options
-
-        exp_resource = {
-            "sourceFormat": "CSV",
-            "csvOptions": {
-                "fieldDelimiter": "fieldDelimiter",
-                "skipLeadingRows": "123",
-                "quote": "quote",
-                "allowQuotedNewlines": True,
-                "allowJaggedRows": False,
-                "encoding": "encoding",
-            },
-        }
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, exp_resource)
-
-    def test_from_api_repr_bigtable(self):
-        qualifier_encoded = base64.standard_b64encode(b"q").decode("ascii")
-        resource = _copy_and_update(
-            self.BASE_RESOURCE,
-            {
-                "sourceFormat": "BIGTABLE",
-                "bigtableOptions": {
-                    "ignoreUnspecifiedColumnFamilies": True,
-                    "readRowkeyAsString": False,
-                    "columnFamilies": [
-                        {
-                            "familyId": "familyId",
-                            "type": "type",
-                            "encoding": "encoding",
-                            "columns": [
-                                {
-                                    "qualifierString": "q",
-                                    "fieldName": "fieldName1",
-                                    "type": "type1",
-                                    "encoding": "encoding1",
-                                    "onlyReadLatest": True,
-                                },
-                                {
-                                    "qualifierEncoded": qualifier_encoded,
-                                    "fieldName": "fieldName2",
-                                    "type": "type2",
-                                    "encoding": "encoding2",
-                                },
-                            ],
-                            "onlyReadLatest": False,
-                        }
-                    ],
-                },
-            },
-        )
-
-        ec = external_config.ExternalConfig.from_api_repr(resource)
-
-        self._verify_base(ec)
-        self.assertEqual(ec.source_format, "BIGTABLE")
-        self.assertIsInstance(ec.options, external_config.BigtableOptions)
-        self.assertEqual(ec.options.ignore_unspecified_column_families, True)
-        self.assertEqual(ec.options.read_rowkey_as_string, False)
-        self.assertEqual(len(ec.options.column_families), 1)
-        fam1 = ec.options.column_families[0]
-        self.assertIsInstance(fam1, external_config.BigtableColumnFamily)
-        self.assertEqual(fam1.family_id, "familyId")
-        self.assertEqual(fam1.type_, "type")
-        self.assertEqual(fam1.encoding, "encoding")
-        self.assertEqual(len(fam1.columns), 2)
-        self.assertFalse(fam1.only_read_latest)
-        col1 = fam1.columns[0]
-        self.assertEqual(col1.qualifier_string, "q")
-        self.assertEqual(col1.field_name, "fieldName1")
-        self.assertEqual(col1.type_, "type1")
-        self.assertEqual(col1.encoding, "encoding1")
-        self.assertTrue(col1.only_read_latest)
-        self.assertIsNone(col1.qualifier_encoded)
-        col2 = ec.options.column_families[0].columns[1]
-        self.assertEqual(col2.qualifier_encoded, b"q")
-        self.assertEqual(col2.field_name, "fieldName2")
-        self.assertEqual(col2.type_, "type2")
-        self.assertEqual(col2.encoding, "encoding2")
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, resource)
-
-    def test_to_api_repr_bigtable(self):
-        ec = external_config.ExternalConfig("BIGTABLE")
-        options = external_config.BigtableOptions()
-        options.ignore_unspecified_column_families = True
-        options.read_rowkey_as_string = False
-        ec._options = options
-
-        fam1 = external_config.BigtableColumnFamily()
-        fam1.family_id = "familyId"
-        fam1.type_ = "type"
-        fam1.encoding = "encoding"
-        fam1.only_read_latest = False
-        col1 = external_config.BigtableColumn()
-        col1.qualifier_string = "q"
-        col1.field_name = "fieldName1"
-        col1.type_ = "type1"
-        col1.encoding = "encoding1"
-        col1.only_read_latest = True
-        col2 = external_config.BigtableColumn()
-        col2.qualifier_encoded = b"q"
-        col2.field_name = "fieldName2"
-        col2.type_ = "type2"
-        col2.encoding = "encoding2"
-        fam1.columns = [col1, col2]
-        options.column_families = [fam1]
-
-        qualifier_encoded = base64.standard_b64encode(b"q").decode("ascii")
-        exp_resource = {
-            "sourceFormat": "BIGTABLE",
-            "bigtableOptions": {
-                "ignoreUnspecifiedColumnFamilies": True,
-                "readRowkeyAsString": False,
-                "columnFamilies": [
-                    {
-                        "familyId": "familyId",
-                        "type": "type",
-                        "encoding": "encoding",
-                        "columns": [
-                            {
-                                "qualifierString": "q",
-                                "fieldName": "fieldName1",
-                                "type": "type1",
-                                "encoding": "encoding1",
-                                "onlyReadLatest": True,
-                            },
-                            {
-                                "qualifierEncoded": qualifier_encoded,
-                                "fieldName": "fieldName2",
-                                "type": "type2",
-                                "encoding": "encoding2",
-                            },
-                        ],
-                        "onlyReadLatest": False,
-                    }
-                ],
-            },
-        }
-
-        got_resource = ec.to_api_repr()
-
-        self.assertEqual(got_resource, exp_resource)
-
-
-def _copy_and_update(d, u):
-    d = copy.deepcopy(d)
-    d.update(u)
-    return d
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
deleted file mode 100644
index 6b0d4b8fb352..000000000000
--- a/bigquery/tests/unit/test_job.py
+++ /dev/null
@@ -1,6033 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import concurrent
-import copy
-import json
-import textwrap
-import unittest
-
-import freezegun
-import mock
-import pytest
-import requests
-from six.moves import http_client
-
-try:
-    import pandas
-except (ImportError, AttributeError):  # pragma: NO COVER
-    pandas = None
-
-try:
-    import pyarrow
-except ImportError:  # pragma: NO COVER
-    pyarrow = None
-try:
-    from google.cloud import bigquery_storage_v1beta1
-except (ImportError, AttributeError):  # pragma: NO COVER
-    bigquery_storage_v1beta1 = None
-try:
-    from tqdm import tqdm
-except (ImportError, AttributeError):  # pragma: NO COVER
-    tqdm = None
-
-
-def _make_credentials():
-    import google.auth.credentials
-
-    return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client(project="test-project", connection=None):
-    from google.cloud.bigquery.client import Client
-
-    if connection is None:
-        connection = _make_connection()
-
-    client = Client(project=project, credentials=_make_credentials(), _http=object())
-    client._connection = connection
-    return client
-
-
-def _make_connection(*responses):
-    import google.cloud.bigquery._http
-    from google.cloud.exceptions import NotFound
-
-    mock_conn = mock.create_autospec(google.cloud.bigquery._http.Connection)
-    mock_conn.api_request.side_effect = list(responses) + [NotFound("miss")]
-    return mock_conn
-
-
-def _make_job_resource(
-    creation_time_ms=1437767599006,
-    started_time_ms=1437767600007,
-    ended_time_ms=1437767601008,
-    started=False,
-    ended=False,
-    etag="abc-def-hjk",
-    endpoint="https://bigquery.googleapis.com",
-    job_type="load",
-    job_id="a-random-id",
-    project_id="some-project",
-    user_email="bq-user@example.com",
-):
-    resource = {
-        "configuration": {job_type: {}},
-        "statistics": {"creationTime": creation_time_ms, job_type: {}},
-        "etag": etag,
-        "id": "{}:{}".format(project_id, job_id),
-        "jobReference": {"projectId": project_id, "jobId": job_id},
-        "selfLink": "{}/bigquery/v2/projects/{}/jobs/{}".format(
-            endpoint, project_id, job_id
-        ),
-        "user_email": user_email,
-    }
-
-    if started or ended:
-        resource["statistics"]["startTime"] = started_time_ms
-
-    if ended:
-        resource["statistics"]["endTime"] = ended_time_ms
-
-    if job_type == "query":
-        resource["configuration"]["query"]["destinationTable"] = {
-            "projectId": project_id,
-            "datasetId": "_temp_dataset",
-            "tableId": "_temp_table",
-        }
-
-    return resource
-
-
-class Test__error_result_to_exception(unittest.TestCase):
-    def _call_fut(self, *args, **kwargs):
-        from google.cloud.bigquery import job
-
-        return job._error_result_to_exception(*args, **kwargs)
-
-    def test_simple(self):
-        error_result = {"reason": "invalid", "message": "bad request"}
-        exception = self._call_fut(error_result)
-        self.assertEqual(exception.code, http_client.BAD_REQUEST)
-        self.assertTrue(exception.message.startswith("bad request"))
-        self.assertIn(error_result, exception.errors)
-
-    def test_missing_reason(self):
-        error_result = {}
-        exception = self._call_fut(error_result)
-        self.assertEqual(exception.code, http_client.INTERNAL_SERVER_ERROR)
-
-
-class Test_JobReference(unittest.TestCase):
-    JOB_ID = "job-id"
-    PROJECT = "test-project-123"
-    LOCATION = "us-central"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery import job
-
-        return job._JobReference
-
-    def _make_one(self, job_id, project, location):
-        return self._get_target_class()(job_id, project, location)
-
-    def test_ctor(self):
-        job_ref = self._make_one(self.JOB_ID, self.PROJECT, self.LOCATION)
-
-        self.assertEqual(job_ref.job_id, self.JOB_ID)
-        self.assertEqual(job_ref.project, self.PROJECT)
-        self.assertEqual(job_ref.location, self.LOCATION)
-
-    def test__to_api_repr(self):
-        job_ref = self._make_one(self.JOB_ID, self.PROJECT, self.LOCATION)
-
-        self.assertEqual(
-            job_ref._to_api_repr(),
-            {
-                "jobId": self.JOB_ID,
-                "projectId": self.PROJECT,
-                "location": self.LOCATION,
-            },
-        )
-
-    def test_from_api_repr(self):
-        api_repr = {
-            "jobId": self.JOB_ID,
-            "projectId": self.PROJECT,
-            "location": self.LOCATION,
-        }
-
-        job_ref = self._get_target_class()._from_api_repr(api_repr)
-
-        self.assertEqual(job_ref.job_id, self.JOB_ID)
-        self.assertEqual(job_ref.project, self.PROJECT)
-        self.assertEqual(job_ref.location, self.LOCATION)
-
-
-class Test_AsyncJob(unittest.TestCase):
-    JOB_ID = "job-id"
-    PROJECT = "test-project-123"
-    LOCATION = "us-central"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery import job
-
-        return job._AsyncJob
-
-    def _make_one(self, job_id, client):
-        return self._get_target_class()(job_id, client)
-
-    def _make_derived_class(self):
-        class Derived(self._get_target_class()):
-            _JOB_TYPE = "derived"
-
-        return Derived
-
-    def _make_derived(self, job_id, client):
-        return self._make_derived_class()(job_id, client)
-
-    @staticmethod
-    def _job_reference(job_id, project, location):
-        from google.cloud.bigquery import job
-
-        return job._JobReference(job_id, project, location)
-
-    def test_ctor_w_bare_job_id(self):
-        import threading
-
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertEqual(job.job_id, self.JOB_ID)
-        self.assertEqual(job.project, self.PROJECT)
-        self.assertIsNone(job.location)
-        self.assertIs(job._client, client)
-        self.assertEqual(
-            job._properties,
-            {"jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}},
-        )
-        self.assertIsInstance(job._completion_lock, type(threading.Lock()))
-        self.assertEqual(
-            job.path, "/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID)
-        )
-
-    def test_ctor_w_job_ref(self):
-        import threading
-
-        other_project = "other-project-234"
-        client = _make_client(project=other_project)
-        job_ref = self._job_reference(self.JOB_ID, self.PROJECT, self.LOCATION)
-        job = self._make_one(job_ref, client)
-
-        self.assertEqual(job.job_id, self.JOB_ID)
-        self.assertEqual(job.project, self.PROJECT)
-        self.assertEqual(job.location, self.LOCATION)
-        self.assertIs(job._client, client)
-        self.assertEqual(
-            job._properties,
-            {
-                "jobReference": {
-                    "projectId": self.PROJECT,
-                    "location": self.LOCATION,
-                    "jobId": self.JOB_ID,
-                }
-            },
-        )
-        self.assertFalse(job._result_set)
-        self.assertIsInstance(job._completion_lock, type(threading.Lock()))
-        self.assertEqual(
-            job.path, "/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID)
-        )
-
-    def test__require_client_w_none(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertIs(job._require_client(None), client)
-
-    def test__require_client_w_other(self):
-        client = _make_client(project=self.PROJECT)
-        other = object()
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertIs(job._require_client(other), other)
-
-    def test_job_type(self):
-        client = _make_client(project=self.PROJECT)
-        derived = self._make_derived(self.JOB_ID, client)
-
-        self.assertEqual(derived.job_type, "derived")
-
-    def test_parent_job_id(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertIsNone(job.parent_job_id)
-        job._properties["statistics"] = {"parentJobId": "parent-job-123"}
-        self.assertEqual(job.parent_job_id, "parent-job-123")
-
-    def test_script_statistics(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertIsNone(job.script_statistics)
-        job._properties["statistics"] = {
-            "scriptStatistics": {
-                "evaluationKind": "EXPRESSION",
-                "stackFrames": [
-                    {
-                        "startLine": 5,
-                        "startColumn": 29,
-                        "endLine": 9,
-                        "endColumn": 14,
-                        "text": "QUERY TEXT",
-                    }
-                ],
-            }
-        }
-        script_stats = job.script_statistics
-        self.assertEqual(script_stats.evaluation_kind, "EXPRESSION")
-        stack_frames = script_stats.stack_frames
-        self.assertEqual(len(stack_frames), 1)
-        stack_frame = stack_frames[0]
-        self.assertIsNone(stack_frame.procedure_id)
-        self.assertEqual(stack_frame.start_line, 5)
-        self.assertEqual(stack_frame.start_column, 29)
-        self.assertEqual(stack_frame.end_line, 9)
-        self.assertEqual(stack_frame.end_column, 14)
-        self.assertEqual(stack_frame.text, "QUERY TEXT")
-
-    def test_num_child_jobs(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertEqual(job.num_child_jobs, 0)
-        job._properties["statistics"] = {"numChildJobs": "17"}
-        self.assertEqual(job.num_child_jobs, 17)
-
-    def test_labels_miss(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        self.assertEqual(job.labels, {})
-
-    def test_labels_update_in_place(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        labels = job.labels
-        labels["foo"] = "bar"  # update in place
-        self.assertEqual(job.labels, {"foo": "bar"})
-
-    def test_labels_hit(self):
-        labels = {"foo": "bar"}
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        job._properties["labels"] = labels
-        self.assertEqual(job.labels, labels)
-
-    def test_etag(self):
-        etag = "ETAG-123"
-
client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.etag) - job._properties["etag"] = etag - self.assertEqual(job.etag, etag) - - def test_self_link(self): - self_link = "https://api.example.com/123" - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.self_link) - job._properties["selfLink"] = self_link - self.assertEqual(job.self_link, self_link) - - def test_user_email(self): - user_email = "user@example.com" - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.user_email) - job._properties["user_email"] = user_email - self.assertEqual(job.user_email, user_email) - - @staticmethod - def _datetime_and_millis(): - import datetime - import pytz - from google.cloud._helpers import _millis - - now = datetime.datetime.utcnow().replace( - microsecond=123000, tzinfo=pytz.UTC # stats timestamps have ms precision - ) - return now, _millis(now) - - def test_created(self): - now, millis = self._datetime_and_millis() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.created) - stats = job._properties["statistics"] = {} - self.assertIsNone(job.created) - stats["creationTime"] = millis - self.assertEqual(job.created, now) - - def test_started(self): - now, millis = self._datetime_and_millis() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.started) - stats = job._properties["statistics"] = {} - self.assertIsNone(job.started) - stats["startTime"] = millis - self.assertEqual(job.started, now) - - def test_ended(self): - now, millis = self._datetime_and_millis() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.ended) - stats = job._properties["statistics"] = {} - self.assertIsNone(job.ended) - stats["endTime"] = millis - self.assertEqual(job.ended, now) - - def test__job_statistics(self): - statistics = {"foo": "bar"} - client = _make_client(project=self.PROJECT) - derived = self._make_derived(self.JOB_ID, client) - self.assertEqual(derived._job_statistics(), {}) - stats = derived._properties["statistics"] = {} - self.assertEqual(derived._job_statistics(), {}) - stats["derived"] = statistics - self.assertEqual(derived._job_statistics(), statistics) - - def test_error_result(self): - error_result = { - "debugInfo": "DEBUG INFO", - "location": "LOCATION", - "message": "MESSAGE", - "reason": "REASON", - } - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.error_result) - status = job._properties["status"] = {} - self.assertIsNone(job.error_result) - status["errorResult"] = error_result - self.assertEqual(job.error_result, error_result) - - def test_errors(self): - errors = [ - { - "debugInfo": "DEBUG INFO", - "location": "LOCATION", - "message": "MESSAGE", - "reason": "REASON", - } - ] - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.errors) - status = job._properties["status"] = {} - self.assertIsNone(job.errors) - status["errors"] = errors - self.assertEqual(job.errors, errors) - - def test_state(self): - state = "STATE" - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - self.assertIsNone(job.state) - status = job._properties["status"] = {} - 
self.assertIsNone(job.state) - status["state"] = state - self.assertEqual(job.state, state) - - def test__scrub_local_properties(self): - before = {"foo": "bar"} - resource = before.copy() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._scrub_local_properties(resource) # no raise - self.assertEqual(resource, before) - - def test__copy_configuration_properties(self): - before = {"foo": "bar"} - resource = before.copy() - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - with self.assertRaises(NotImplementedError): - job._copy_configuration_properties(resource) - self.assertEqual(resource, before) - - def _set_properties_job(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._scrub_local_properties = mock.Mock() - job._copy_configuration_properties = mock.Mock() - job._set_future_result = mock.Mock() - job._properties = { - "jobReference": job._properties["jobReference"], - "foo": "bar", - } - return job - - def test__set_properties_no_stats(self): - config = {"test": True} - resource = {"configuration": config} - job = self._set_properties_job() - - job._set_properties(resource) - - self.assertEqual(job._properties, resource) - - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - - def test__set_properties_w_creation_time(self): - now, millis = self._datetime_and_millis() - config = {"test": True} - stats = {"creationTime": str(millis)} - resource = {"configuration": config, "statistics": stats} - job = self._set_properties_job() - - job._set_properties(resource) - - cleaned = copy.deepcopy(resource) - cleaned["statistics"]["creationTime"] = float(millis) - self.assertEqual(job._properties, cleaned) - - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - - def test__set_properties_w_start_time(self): - now, millis = self._datetime_and_millis() - config = {"test": True} - stats = {"startTime": str(millis)} - resource = {"configuration": config, "statistics": stats} - job = self._set_properties_job() - - job._set_properties(resource) - - cleaned = copy.deepcopy(resource) - cleaned["statistics"]["startTime"] = float(millis) - self.assertEqual(job._properties, cleaned) - - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - - def test__set_properties_w_end_time(self): - now, millis = self._datetime_and_millis() - config = {"test": True} - stats = {"endTime": str(millis)} - resource = {"configuration": config, "statistics": stats} - job = self._set_properties_job() - - job._set_properties(resource) - - cleaned = copy.deepcopy(resource) - cleaned["statistics"]["endTime"] = float(millis) - self.assertEqual(job._properties, cleaned) - - job._scrub_local_properties.assert_called_once_with(resource) - job._copy_configuration_properties.assert_called_once_with(config) - - def test__get_resource_config_missing_job_ref(self): - resource = {} - klass = self._make_derived_class() - - with self.assertRaises(KeyError): - klass._get_resource_config(resource) - - def test__get_resource_config_missing_job_id(self): - resource = {"jobReference": {}} - klass = self._make_derived_class() - - with self.assertRaises(KeyError): - klass._get_resource_config(resource) - - def test__get_resource_config_missing_configuration(self): - resource 
= {"jobReference": {"jobId": self.JOB_ID}} - klass = self._make_derived_class() - - with self.assertRaises(KeyError): - klass._get_resource_config(resource) - - def test__get_resource_config_missing_config_type(self): - resource = {"jobReference": {"jobId": self.JOB_ID}, "configuration": {}} - klass = self._make_derived_class() - - with self.assertRaises(KeyError): - klass._get_resource_config(resource) - - def test__get_resource_config_ok(self): - derived_config = {"foo": "bar"} - resource = { - "jobReference": {"jobId": self.JOB_ID}, - "configuration": {"derived": derived_config}, - } - klass = self._make_derived_class() - - job_id, config = klass._get_resource_config(resource) - - self.assertEqual(job_id, self.JOB_ID) - self.assertEqual(config, {"derived": derived_config}) - - def test__build_resource(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - with self.assertRaises(NotImplementedError): - job._build_resource() - - def test_to_api_repr(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - with self.assertRaises(NotImplementedError): - job.to_api_repr() - - def test__begin_already(self): - job = self._set_properties_job() - job._properties["status"] = {"state": "WHATEVER"} - - with self.assertRaises(ValueError): - job._begin() - - def test__begin_defaults(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - job = self._set_properties_job() - builder = job.to_api_repr = mock.Mock() - builder.return_value = resource - call_api = job._client._call_api = mock.Mock() - call_api.return_value = resource - - job._begin() - - call_api.assert_called_once_with( - DEFAULT_RETRY, - method="POST", - path="/projects/{}/jobs".format(self.PROJECT), - data=resource, - timeout=None, - ) - self.assertEqual(job._properties, resource) - - def test__begin_explicit(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - other_project = "other-project-234" - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - job = self._set_properties_job() - builder = job.to_api_repr = mock.Mock() - builder.return_value = resource - client = _make_client(project=other_project) - call_api = client._call_api = mock.Mock() - call_api.return_value = resource - retry = DEFAULT_RETRY.with_deadline(1) - - job._begin(client=client, retry=retry, timeout=7.5) - - call_api.assert_called_once_with( - retry, - method="POST", - path="/projects/{}/jobs".format(self.PROJECT), - data=resource, - timeout=7.5, - ) - self.assertEqual(job._properties, resource) - - def test_exists_defaults_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.bigquery.retry import DEFAULT_RETRY - - job = self._set_properties_job() - job._properties["jobReference"]["location"] = self.LOCATION - call_api = job._client._call_api = mock.Mock() - call_api.side_effect = NotFound("testing") - - self.assertFalse(job.exists()) - - call_api.assert_called_once_with( - DEFAULT_RETRY, - method="GET", - path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), - query_params={"fields": "id", "location": self.LOCATION}, - timeout=None, - ) - - def test_exists_explicit_hit(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - other_project = "other-project-234" - 
resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - job = self._set_properties_job() - client = _make_client(project=other_project) - call_api = client._call_api = mock.Mock() - call_api.return_value = resource - retry = DEFAULT_RETRY.with_deadline(1) - - self.assertTrue(job.exists(client=client, retry=retry)) - - call_api.assert_called_once_with( - retry, - method="GET", - path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), - query_params={"fields": "id"}, - timeout=None, - ) - - def test_exists_w_timeout(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - PATH = "/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID) - job = self._set_properties_job() - call_api = job._client._call_api = mock.Mock() - - job.exists(timeout=7.5) - - call_api.assert_called_once_with( - DEFAULT_RETRY, - method="GET", - path=PATH, - query_params={"fields": "id"}, - timeout=7.5, - ) - - def test_reload_defaults(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - job = self._set_properties_job() - job._properties["jobReference"]["location"] = self.LOCATION - call_api = job._client._call_api = mock.Mock() - call_api.return_value = resource - - job.reload() - - call_api.assert_called_once_with( - DEFAULT_RETRY, - method="GET", - path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), - query_params={"location": self.LOCATION}, - timeout=None, - ) - self.assertEqual(job._properties, resource) - - def test_reload_explicit(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - other_project = "other-project-234" - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - job = self._set_properties_job() - client = _make_client(project=other_project) - call_api = client._call_api = mock.Mock() - call_api.return_value = resource - retry = DEFAULT_RETRY.with_deadline(1) - - job.reload(client=client, retry=retry, timeout=4.2) - - call_api.assert_called_once_with( - retry, - method="GET", - path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), - query_params={}, - timeout=4.2, - ) - self.assertEqual(job._properties, resource) - - def test_cancel_defaults(self): - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - response = {"job": resource} - job = self._set_properties_job() - job._properties["jobReference"]["location"] = self.LOCATION - connection = job._client._connection = _make_connection(response) - - self.assertTrue(job.cancel()) - - connection.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/jobs/{}/cancel".format(self.PROJECT, self.JOB_ID), - query_params={"location": self.LOCATION}, - timeout=None, - ) - self.assertEqual(job._properties, resource) - - def test_cancel_explicit(self): - other_project = "other-project-234" - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - response = {"job": resource} - job = self._set_properties_job() - client = _make_client(project=other_project) - connection = client._connection = _make_connection(response) - - 
self.assertTrue(job.cancel(client=client, timeout=7.5)) - - connection.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/jobs/{}/cancel".format(self.PROJECT, self.JOB_ID), - query_params={}, - timeout=7.5, - ) - self.assertEqual(job._properties, resource) - - def test_cancel_w_custom_retry(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - api_path = "/projects/{}/jobs/{}/cancel".format(self.PROJECT, self.JOB_ID) - resource = { - "jobReference": { - "jobId": self.JOB_ID, - "projectId": self.PROJECT, - "location": None, - }, - "configuration": {"test": True}, - } - response = {"job": resource} - job = self._set_properties_job() - - api_request_patcher = mock.patch.object( - job._client._connection, "api_request", side_effect=[ValueError, response], - ) - retry = DEFAULT_RETRY.with_deadline(1).with_predicate( - lambda exc: isinstance(exc, ValueError) - ) - - with api_request_patcher as fake_api_request: - result = job.cancel(retry=retry, timeout=7.5) - - self.assertTrue(result) - self.assertEqual(job._properties, resource) - self.assertEqual( - fake_api_request.call_args_list, - [ - mock.call(method="POST", path=api_path, query_params={}, timeout=7.5), - mock.call( - method="POST", path=api_path, query_params={}, timeout=7.5, - ), # was retried once - ], - ) - - def test__set_future_result_wo_done(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - set_exception = job.set_exception = mock.Mock() - set_result = job.set_result = mock.Mock() - - job._set_future_result() - - set_exception.assert_not_called() - set_result.assert_not_called() - - def test__set_future_result_w_result_set(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._properties["status"] = {"state": "DONE"} - job._result_set = True - set_exception = job.set_exception = mock.Mock() - set_result = job.set_result = mock.Mock() - - job._set_future_result() - - set_exception.assert_not_called() - set_result.assert_not_called() - - def test__set_future_result_w_done_wo_result_set_w_error(self): - from google.cloud.exceptions import NotFound - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._properties["status"] = { - "state": "DONE", - "errorResult": {"reason": "notFound", "message": "testing"}, - } - set_exception = job.set_exception = mock.Mock() - set_result = job.set_result = mock.Mock() - - job._set_future_result() - - set_exception.assert_called_once() - args, kw = set_exception.call_args - (exception,) = args - self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, "testing") - self.assertEqual(kw, {}) - set_result.assert_not_called() - - def test__set_future_result_w_done_wo_result_set_wo_error(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - job._properties["status"] = {"state": "DONE"} - set_exception = job.set_exception = mock.Mock() - set_result = job.set_result = mock.Mock() - - job._set_future_result() - - set_exception.assert_not_called() - set_result.assert_called_once_with(job) - - def test_done_defaults_wo_state(self): - from google.cloud.bigquery.retry import DEFAULT_RETRY - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, client) - reload_ = job.reload = mock.Mock() - - self.assertFalse(job.done()) - - reload_.assert_called_once_with(retry=DEFAULT_RETRY, timeout=None) - - def test_done_explicit_wo_state(self): - from 
google.cloud.bigquery.retry import DEFAULT_RETRY
-
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        reload_ = job.reload = mock.Mock()
-        retry = DEFAULT_RETRY.with_deadline(1)
-
-        self.assertFalse(job.done(retry=retry, timeout=7.5))
-
-        reload_.assert_called_once_with(retry=retry, timeout=7.5)
-
-    def test_done_already(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        job._properties["status"] = {"state": "DONE"}
-
-        self.assertTrue(job.done())
-
-    @mock.patch("google.api_core.future.polling.PollingFuture.result")
-    def test_result_default_wo_state(self, result):
-        from google.cloud.bigquery.retry import DEFAULT_RETRY
-
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        begin = job._begin = mock.Mock()
-
-        self.assertIs(job.result(), result.return_value)
-
-        begin.assert_called_once_with(retry=DEFAULT_RETRY, timeout=None)
-        result.assert_called_once_with(timeout=None)
-
-    @mock.patch("google.api_core.future.polling.PollingFuture.result")
-    def test_result_w_retry_wo_state(self, result):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        begin = job._begin = mock.Mock()
-        retry = mock.Mock()
-
-        self.assertIs(job.result(retry=retry), result.return_value)
-
-        begin.assert_called_once_with(retry=retry, timeout=None)
-        result.assert_called_once_with(timeout=None)
-
-    @mock.patch("google.api_core.future.polling.PollingFuture.result")
-    def test_result_explicit_w_state(self, result):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        job._properties["status"] = {"state": "DONE"}
-        begin = job._begin = mock.Mock()
-        timeout = 1
-
-        self.assertIs(job.result(timeout=timeout), result.return_value)
-
-        begin.assert_not_called()
-        result.assert_called_once_with(timeout=timeout)
-
-    @mock.patch("google.api_core.future.polling.PollingFuture.result")
-    def test_result_splitting_timeout_between_requests(self, result):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        begin = job._begin = mock.Mock()
-        retry = mock.Mock()
-
-        with freezegun.freeze_time("1970-01-01 00:00:00", tick=False) as frozen_time:
-
-            def delayed_begin(*args, **kwargs):
-                frozen_time.tick(delta=0.3)
-
-            begin.side_effect = delayed_begin
-            job.result(retry=retry, timeout=1.0)
-
-        begin.assert_called_once_with(retry=retry, timeout=1.0)
-        result.assert_called_once_with(timeout=0.7)
-
-    def test_cancelled_wo_error_result(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-
-        self.assertFalse(job.cancelled())
-
-    def test_cancelled_w_error_result_not_stopped(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        job._properties["status"] = {"errorResult": {"reason": "other"}}
-
-        self.assertFalse(job.cancelled())
-
-    def test_cancelled_w_error_result_w_stopped(self):
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, client)
-        job._properties["status"] = {"errorResult": {"reason": "stopped"}}
-
-        self.assertTrue(job.cancelled())
-
-
-class Test_JobConfig(unittest.TestCase):
-    JOB_TYPE = "testing"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigquery import job
-
-        return job._JobConfig
-
-    def _make_one(self, job_type=JOB_TYPE):
-        return self._get_target_class()(job_type)
-
-    def test_ctor(self):
-        job_config = self._make_one()
-        
self.assertEqual(job_config._job_type, self.JOB_TYPE) - self.assertEqual(job_config._properties, {self.JOB_TYPE: {}}) - - def test_fill_from_default(self): - from google.cloud.bigquery import QueryJobConfig - - job_config = QueryJobConfig() - job_config.dry_run = True - job_config.maximum_bytes_billed = 1000 - - default_job_config = QueryJobConfig() - default_job_config.use_query_cache = True - default_job_config.maximum_bytes_billed = 2000 - - final_job_config = job_config._fill_from_default(default_job_config) - self.assertTrue(final_job_config.dry_run) - self.assertTrue(final_job_config.use_query_cache) - self.assertEqual(final_job_config.maximum_bytes_billed, 1000) - - def test_fill_from_default_conflict(self): - from google.cloud.bigquery import QueryJobConfig - - basic_job_config = QueryJobConfig() - conflicting_job_config = self._make_one("conflicting_job_type") - self.assertNotEqual( - basic_job_config._job_type, conflicting_job_config._job_type - ) - - with self.assertRaises(TypeError): - basic_job_config._fill_from_default(conflicting_job_config) - - @mock.patch("google.cloud.bigquery._helpers._get_sub_prop") - def test__get_sub_prop_wo_default(self, _get_sub_prop): - job_config = self._make_one() - key = "key" - self.assertIs(job_config._get_sub_prop(key), _get_sub_prop.return_value) - _get_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], default=None - ) - - @mock.patch("google.cloud.bigquery._helpers._get_sub_prop") - def test__get_sub_prop_w_default(self, _get_sub_prop): - job_config = self._make_one() - key = "key" - default = "default" - self.assertIs( - job_config._get_sub_prop(key, default=default), _get_sub_prop.return_value - ) - _get_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], default=default - ) - - @mock.patch("google.cloud.bigquery._helpers._set_sub_prop") - def test__set_sub_prop(self, _set_sub_prop): - job_config = self._make_one() - key = "key" - value = "value" - job_config._set_sub_prop(key, value) - _set_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], value - ) - - def test_to_api_repr(self): - job_config = self._make_one() - expected = job_config._properties = {self.JOB_TYPE: {"foo": "bar"}} - found = job_config.to_api_repr() - self.assertEqual(found, expected) - self.assertIsNot(found, expected) # copied - - # 'from_api_repr' cannot be tested on '_JobConfig', because it presumes - # the ctor can be called w/o arguments - - def test_labels_miss(self): - job_config = self._make_one() - self.assertEqual(job_config.labels, {}) - - def test_labels_update_in_place(self): - job_config = self._make_one() - labels = job_config.labels - labels["foo"] = "bar" # update in place - self.assertEqual(job_config.labels, {"foo": "bar"}) - - def test_labels_hit(self): - labels = {"foo": "bar"} - job_config = self._make_one() - job_config._properties["labels"] = labels - self.assertEqual(job_config.labels, labels) - - def test_labels_setter_invalid(self): - labels = object() - job_config = self._make_one() - with self.assertRaises(ValueError): - job_config.labels = labels - - def test_labels_setter(self): - labels = {"foo": "bar"} - job_config = self._make_one() - job_config.labels = labels - self.assertEqual(job_config._properties["labels"], labels) - - -class _Base(object): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.table import TableReference - - ENDPOINT = "https://bigquery.googleapis.com" - PROJECT = "project" - SOURCE1 = 
"http://example.com/source1.csv" - DS_ID = "dataset_id" - DS_REF = DatasetReference(PROJECT, DS_ID) - TABLE_ID = "table_id" - TABLE_REF = TableReference(DS_REF, TABLE_ID) - JOB_ID = "JOB_ID" - KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _setUpConstants(self): - import datetime - from google.cloud._helpers import UTC - - self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) - self.ETAG = "ETAG" - self.FULL_JOB_ID = "%s:%s" % (self.PROJECT, self.JOB_ID) - self.RESOURCE_URL = "{}/bigquery/v2/projects/{}/jobs/{}".format( - self.ENDPOINT, self.PROJECT, self.JOB_ID - ) - self.USER_EMAIL = "phred@example.com" - - def _table_ref(self, table_id): - from google.cloud.bigquery.table import TableReference - - return TableReference(self.DS_REF, table_id) - - def _make_resource(self, started=False, ended=False): - self._setUpConstants() - return _make_job_resource( - creation_time_ms=int(self.WHEN_TS * 1000), - started_time_ms=int(self.WHEN_TS * 1000), - ended_time_ms=int(self.WHEN_TS * 1000) + 1000000, - started=started, - ended=ended, - etag=self.ETAG, - endpoint=self.ENDPOINT, - job_type=self.JOB_TYPE, - job_id=self.JOB_ID, - project_id=self.PROJECT, - user_email=self.USER_EMAIL, - ) - - def _verifyInitialReadonlyProperties(self, job): - # root elements of resource - self.assertIsNone(job.etag) - self.assertIsNone(job.self_link) - self.assertIsNone(job.user_email) - - # derived from resource['statistics'] - self.assertIsNone(job.created) - self.assertIsNone(job.started) - self.assertIsNone(job.ended) - - # derived from resource['status'] - self.assertIsNone(job.error_result) - self.assertIsNone(job.errors) - self.assertIsNone(job.state) - - def _verifyReadonlyResourceProperties(self, job, resource): - from datetime import timedelta - - statistics = resource.get("statistics", {}) - - if "creationTime" in statistics: - self.assertEqual(job.created, self.WHEN) - else: - self.assertIsNone(job.created) - - if "startTime" in statistics: - self.assertEqual(job.started, self.WHEN) - else: - self.assertIsNone(job.started) - - if "endTime" in statistics: - self.assertEqual(job.ended, self.WHEN + timedelta(seconds=1000)) - else: - self.assertIsNone(job.ended) - - if "etag" in resource: - self.assertEqual(job.etag, self.ETAG) - else: - self.assertIsNone(job.etag) - - if "selfLink" in resource: - self.assertEqual(job.self_link, self.RESOURCE_URL) - else: - self.assertIsNone(job.self_link) - - if "user_email" in resource: - self.assertEqual(job.user_email, self.USER_EMAIL) - else: - self.assertIsNone(job.user_email) - - -class TestLoadJobConfig(unittest.TestCase, _Base): - JOB_TYPE = "load" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import LoadJobConfig - - return LoadJobConfig - - def test_ctor_w_properties(self): - config = self._get_target_class()( - allow_jagged_rows=True, allow_quoted_newlines=True - ) - - self.assertTrue(config.allow_jagged_rows) - self.assertTrue(config.allow_quoted_newlines) - - def test_allow_jagged_rows_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.allow_jagged_rows) - - def test_allow_jagged_rows_hit(self): - config = self._get_target_class()() - config._properties["load"]["allowJaggedRows"] = True - self.assertTrue(config.allow_jagged_rows) - - def test_allow_jagged_rows_setter(self): - config = self._get_target_class()() - config.allow_jagged_rows 
= True - self.assertTrue(config._properties["load"]["allowJaggedRows"]) - - def test_allow_quoted_newlines_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.allow_quoted_newlines) - - def test_allow_quoted_newlines_hit(self): - config = self._get_target_class()() - config._properties["load"]["allowQuotedNewlines"] = True - self.assertTrue(config.allow_quoted_newlines) - - def test_allow_quoted_newlines_setter(self): - config = self._get_target_class()() - config.allow_quoted_newlines = True - self.assertTrue(config._properties["load"]["allowQuotedNewlines"]) - - def test_autodetect_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.autodetect) - - def test_autodetect_hit(self): - config = self._get_target_class()() - config._properties["load"]["autodetect"] = True - self.assertTrue(config.autodetect) - - def test_autodetect_setter(self): - config = self._get_target_class()() - config.autodetect = True - self.assertTrue(config._properties["load"]["autodetect"]) - - def test_clustering_fields_miss(self): - config = self._get_target_class()() - self.assertIsNone(config.clustering_fields) - - def test_clustering_fields_hit(self): - config = self._get_target_class()() - fields = ["email", "postal_code"] - config._properties["load"]["clustering"] = {"fields": fields} - self.assertEqual(config.clustering_fields, fields) - - def test_clustering_fields_setter(self): - fields = ["email", "postal_code"] - config = self._get_target_class()() - config.clustering_fields = fields - self.assertEqual(config._properties["load"]["clustering"], {"fields": fields}) - - def test_clustering_fields_setter_w_none(self): - config = self._get_target_class()() - fields = ["email", "postal_code"] - config._properties["load"]["clustering"] = {"fields": fields} - config.clustering_fields = None - self.assertIsNone(config.clustering_fields) - self.assertNotIn("clustering", config._properties["load"]) - - def test_create_disposition_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.create_disposition) - - def test_create_disposition_hit(self): - from google.cloud.bigquery.job import CreateDisposition - - disposition = CreateDisposition.CREATE_IF_NEEDED - config = self._get_target_class()() - config._properties["load"]["createDisposition"] = disposition - self.assertEqual(config.create_disposition, disposition) - - def test_create_disposition_setter(self): - from google.cloud.bigquery.job import CreateDisposition - - disposition = CreateDisposition.CREATE_IF_NEEDED - config = self._get_target_class()() - config.create_disposition = disposition - self.assertEqual(config._properties["load"]["createDisposition"], disposition) - - def test_destination_encryption_configuration_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.destination_encryption_configuration) - - def test_destination_encryption_configuration_hit(self): - from google.cloud.bigquery.encryption_configuration import ( - EncryptionConfiguration, - ) - - kms_key_name = "kms-key-name" - encryption_configuration = EncryptionConfiguration(kms_key_name) - config = self._get_target_class()() - config._properties["load"]["destinationEncryptionConfiguration"] = { - "kmsKeyName": kms_key_name - } - self.assertEqual( - config.destination_encryption_configuration, encryption_configuration - ) - - def test_destination_encryption_configuration_setter(self): - from google.cloud.bigquery.encryption_configuration import ( - EncryptionConfiguration, - ) - - 
kms_key_name = "kms-key-name" - encryption_configuration = EncryptionConfiguration(kms_key_name) - config = self._get_target_class()() - config.destination_encryption_configuration = encryption_configuration - expected = {"kmsKeyName": kms_key_name} - self.assertEqual( - config._properties["load"]["destinationEncryptionConfiguration"], expected - ) - - def test_destination_encryption_configuration_setter_w_none(self): - kms_key_name = "kms-key-name" - config = self._get_target_class()() - config._properties["load"]["destinationEncryptionConfiguration"] = { - "kmsKeyName": kms_key_name - } - config.destination_encryption_configuration = None - self.assertIsNone(config.destination_encryption_configuration) - self.assertNotIn( - "destinationEncryptionConfiguration", config._properties["load"] - ) - - def test_destination_table_description_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.destination_table_description) - - def test_destination_table_description_hit(self): - description = "Description" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - "description": description - } - self.assertEqual(config.destination_table_description, description) - - def test_destination_table_description_setter(self): - description = "Description" - config = self._get_target_class()() - config.destination_table_description = description - expected = {"description": description} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_destination_table_description_setter_w_fn_already(self): - description = "Description" - friendly_name = "Friendly Name" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - "friendlyName": friendly_name - } - config.destination_table_description = description - expected = {"friendlyName": friendly_name, "description": description} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_destination_table_description_w_none(self): - description = "Description" - friendly_name = "Friendly Name" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - "description": description, - "friendlyName": friendly_name, - } - config.destination_table_description = None - expected = {"friendlyName": friendly_name} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_destination_table_friendly_name_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.destination_table_friendly_name) - - def test_destination_table_friendly_name_hit(self): - friendly_name = "Friendly Name" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - "friendlyName": friendly_name - } - self.assertEqual(config.destination_table_friendly_name, friendly_name) - - def test_destination_table_friendly_name_setter(self): - friendly_name = "Friendly Name" - config = self._get_target_class()() - config.destination_table_friendly_name = friendly_name - expected = {"friendlyName": friendly_name} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_destination_table_friendly_name_setter_w_descr_already(self): - friendly_name = "Friendly Name" - description = "Description" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - 
"description": description - } - config.destination_table_friendly_name = friendly_name - expected = {"friendlyName": friendly_name, "description": description} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_destination_table_friendly_name_w_none(self): - friendly_name = "Friendly Name" - description = "Description" - config = self._get_target_class()() - config._properties["load"]["destinationTableProperties"] = { - "description": description, - "friendlyName": friendly_name, - } - config.destination_table_friendly_name = None - expected = {"description": description} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - - def test_encoding_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.encoding) - - def test_encoding_hit(self): - from google.cloud.bigquery.job import Encoding - - encoding = Encoding.UTF_8 - config = self._get_target_class()() - config._properties["load"]["encoding"] = encoding - self.assertEqual(config.encoding, encoding) - - def test_encoding_setter(self): - from google.cloud.bigquery.job import Encoding - - encoding = Encoding.UTF_8 - config = self._get_target_class()() - config.encoding = encoding - self.assertEqual(config._properties["load"]["encoding"], encoding) - - def test_field_delimiter_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.field_delimiter) - - def test_field_delimiter_hit(self): - field_delimiter = "|" - config = self._get_target_class()() - config._properties["load"]["fieldDelimiter"] = field_delimiter - self.assertEqual(config.field_delimiter, field_delimiter) - - def test_field_delimiter_setter(self): - field_delimiter = "|" - config = self._get_target_class()() - config.field_delimiter = field_delimiter - self.assertEqual(config._properties["load"]["fieldDelimiter"], field_delimiter) - - def test_hive_partitioning_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.hive_partitioning) - - def test_hive_partitioning_hit(self): - from google.cloud.bigquery.external_config import HivePartitioningOptions - - config = self._get_target_class()() - config._properties["load"]["hivePartitioningOptions"] = { - "sourceUriPrefix": "http://foo/bar", - "mode": "STRINGS", - } - result = config.hive_partitioning - self.assertIsInstance(result, HivePartitioningOptions) - self.assertEqual(result.source_uri_prefix, "http://foo/bar") - self.assertEqual(result.mode, "STRINGS") - - def test_hive_partitioning_setter(self): - from google.cloud.bigquery.external_config import HivePartitioningOptions - - hive_partitioning = HivePartitioningOptions() - hive_partitioning.source_uri_prefix = "http://foo/bar" - hive_partitioning.mode = "AUTO" - - config = self._get_target_class()() - config.hive_partitioning = hive_partitioning - self.assertEqual( - config._properties["load"]["hivePartitioningOptions"], - {"sourceUriPrefix": "http://foo/bar", "mode": "AUTO"}, - ) - - config.hive_partitioning = None - self.assertIsNone(config._properties["load"]["hivePartitioningOptions"]) - - def test_hive_partitioning_invalid_type(self): - config = self._get_target_class()() - - with self.assertRaises(TypeError): - config.hive_partitioning = {"mode": "AUTO"} - - def test_ignore_unknown_values_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.ignore_unknown_values) - - def test_ignore_unknown_values_hit(self): - config = self._get_target_class()() - 
config._properties["load"]["ignoreUnknownValues"] = True - self.assertTrue(config.ignore_unknown_values) - - def test_ignore_unknown_values_setter(self): - config = self._get_target_class()() - config.ignore_unknown_values = True - self.assertTrue(config._properties["load"]["ignoreUnknownValues"]) - - def test_max_bad_records_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.max_bad_records) - - def test_max_bad_records_hit(self): - max_bad_records = 13 - config = self._get_target_class()() - config._properties["load"]["maxBadRecords"] = max_bad_records - self.assertEqual(config.max_bad_records, max_bad_records) - - def test_max_bad_records_setter(self): - max_bad_records = 13 - config = self._get_target_class()() - config.max_bad_records = max_bad_records - self.assertEqual(config._properties["load"]["maxBadRecords"], max_bad_records) - - def test_null_marker_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.null_marker) - - def test_null_marker_hit(self): - null_marker = "XXX" - config = self._get_target_class()() - config._properties["load"]["nullMarker"] = null_marker - self.assertEqual(config.null_marker, null_marker) - - def test_null_marker_setter(self): - null_marker = "XXX" - config = self._get_target_class()() - config.null_marker = null_marker - self.assertEqual(config._properties["load"]["nullMarker"], null_marker) - - def test_quote_character_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.quote_character) - - def test_quote_character_hit(self): - quote_character = "'" - config = self._get_target_class()() - config._properties["load"]["quote"] = quote_character - self.assertEqual(config.quote_character, quote_character) - - def test_quote_character_setter(self): - quote_character = "'" - config = self._get_target_class()() - config.quote_character = quote_character - self.assertEqual(config._properties["load"]["quote"], quote_character) - - def test_schema_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.schema) - - def test_schema_hit(self): - from google.cloud.bigquery.schema import SchemaField - - config = self._get_target_class()() - all_props_repr = { - "mode": "REQUIRED", - "name": "foo", - "type": "INTEGER", - "description": "Foo", - } - minimal_repr = {"name": "bar", "type": "STRING"} - config._properties["load"]["schema"] = { - "fields": [all_props_repr, minimal_repr] - } - all_props, minimal = config.schema - self.assertEqual(all_props, SchemaField.from_api_repr(all_props_repr)) - self.assertEqual(minimal, SchemaField.from_api_repr(minimal_repr)) - - def test_schema_setter_fields(self): - from google.cloud.bigquery.schema import SchemaField - - config = self._get_target_class()() - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - config.schema = [full_name, age] - full_name_repr = { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - } - age_repr = { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - } - self.assertEqual( - config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} - ) - - def test_schema_setter_valid_mappings_list(self): - config = self._get_target_class()() - - schema = [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, - ] - config.schema = schema - - full_name_repr = { - "name": "full_name", - 
"type": "STRING", - "mode": "REQUIRED", - "description": None, - } - age_repr = { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - } - self.assertEqual( - config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} - ) - - def test_schema_setter_invalid_mappings_list(self): - config = self._get_target_class()() - - schema = [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "typeoo": "INTEGER", "mode": "REQUIRED"}, - ] - - with self.assertRaises(Exception): - config.schema = schema - - def test_schema_setter_unsetting_schema(self): - from google.cloud.bigquery.schema import SchemaField - - config = self._get_target_class()() - config._properties["load"]["schema"] = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - config.schema = None - self.assertNotIn("schema", config._properties["load"]) - config.schema = None # no error, idempotent operation - - def test_schema_update_options_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.schema_update_options) - - def test_schema_update_options_hit(self): - from google.cloud.bigquery.job import SchemaUpdateOption - - options = [ - SchemaUpdateOption.ALLOW_FIELD_ADDITION, - SchemaUpdateOption.ALLOW_FIELD_RELAXATION, - ] - config = self._get_target_class()() - config._properties["load"]["schemaUpdateOptions"] = options - self.assertEqual(config.schema_update_options, options) - - def test_schema_update_options_setter(self): - from google.cloud.bigquery.job import SchemaUpdateOption - - options = [ - SchemaUpdateOption.ALLOW_FIELD_ADDITION, - SchemaUpdateOption.ALLOW_FIELD_RELAXATION, - ] - config = self._get_target_class()() - config.schema_update_options = options - self.assertEqual(config._properties["load"]["schemaUpdateOptions"], options) - - def test_skip_leading_rows_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.skip_leading_rows) - - def test_skip_leading_rows_hit_w_str(self): - skip_leading_rows = 1 - config = self._get_target_class()() - config._properties["load"]["skipLeadingRows"] = str(skip_leading_rows) - self.assertEqual(config.skip_leading_rows, skip_leading_rows) - - def test_skip_leading_rows_hit_w_integer(self): - skip_leading_rows = 1 - config = self._get_target_class()() - config._properties["load"]["skipLeadingRows"] = skip_leading_rows - self.assertEqual(config.skip_leading_rows, skip_leading_rows) - - def test_skip_leading_rows_setter(self): - skip_leading_rows = 1 - config = self._get_target_class()() - config.skip_leading_rows = skip_leading_rows - self.assertEqual( - config._properties["load"]["skipLeadingRows"], str(skip_leading_rows) - ) - - def test_source_format_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.source_format) - - def test_source_format_hit(self): - from google.cloud.bigquery.job import SourceFormat - - source_format = SourceFormat.CSV - config = self._get_target_class()() - config._properties["load"]["sourceFormat"] = source_format - self.assertEqual(config.source_format, source_format) - - def test_source_format_setter(self): - from google.cloud.bigquery.job import SourceFormat - - source_format = SourceFormat.CSV - config = self._get_target_class()() - config.source_format = source_format - self.assertEqual(config._properties["load"]["sourceFormat"], source_format) - - def test_range_partitioning_w_none(self): - object_under_test = self._get_target_class()() - 
assert object_under_test.range_partitioning is None
-
-    def test_range_partitioning_w_value(self):
-        object_under_test = self._get_target_class()()
-        object_under_test._properties["load"]["rangePartitioning"] = {
-            "field": "column_one",
-            "range": {"start": 1, "end": 1000, "interval": 10},
-        }
-        assert object_under_test.range_partitioning.field == "column_one"
-        assert object_under_test.range_partitioning.range_.start == 1
-        assert object_under_test.range_partitioning.range_.end == 1000
-        assert object_under_test.range_partitioning.range_.interval == 10
-
-    def test_range_partitioning_setter(self):
-        from google.cloud.bigquery.table import PartitionRange
-        from google.cloud.bigquery.table import RangePartitioning
-
-        object_under_test = self._get_target_class()()
-        object_under_test.range_partitioning = RangePartitioning(
-            field="column_one", range_=PartitionRange(start=1, end=1000, interval=10)
-        )
-        assert object_under_test.range_partitioning.field == "column_one"
-        assert object_under_test.range_partitioning.range_.start == 1
-        assert object_under_test.range_partitioning.range_.end == 1000
-        assert object_under_test.range_partitioning.range_.interval == 10
-
-    def test_range_partitioning_setter_w_none(self):
-        object_under_test = self._get_target_class()()
-        object_under_test.range_partitioning = None
-        assert object_under_test.range_partitioning is None
-
-    def test_range_partitioning_setter_w_wrong_type(self):
-        object_under_test = self._get_target_class()()
-        with pytest.raises(ValueError, match="RangePartitioning"):
-            object_under_test.range_partitioning = object()
-
-    def test_time_partitioning_miss(self):
-        config = self._get_target_class()()
-        self.assertIsNone(config.time_partitioning)
-
-    def test_time_partitioning_hit(self):
-        from google.cloud.bigquery.table import TimePartitioning
-        from google.cloud.bigquery.table import TimePartitioningType
-
-        field = "creation_date"
-        year_ms = 86400 * 1000 * 365
-        config = self._get_target_class()()
-        config._properties["load"]["timePartitioning"] = {
-            "type": TimePartitioningType.DAY,
-            "field": field,
-            "expirationMs": str(year_ms),
-            "requirePartitionFilter": False,
-        }
-        expected = TimePartitioning(
-            type_=TimePartitioningType.DAY,
-            field=field,
-            expiration_ms=year_ms,
-            require_partition_filter=False,
-        )
-        self.assertEqual(config.time_partitioning, expected)
-
-    def test_time_partitioning_setter(self):
-        from google.cloud.bigquery.table import TimePartitioning
-        from google.cloud.bigquery.table import TimePartitioningType
-
-        field = "creation_date"
-        year_ms = 86400 * 1000 * 365
-        time_partitioning = TimePartitioning(
-            type_=TimePartitioningType.DAY,
-            field=field,
-            expiration_ms=year_ms,
-            require_partition_filter=False,
-        )
-        config = self._get_target_class()()
-        config.time_partitioning = time_partitioning
-        expected = {
-            "type": TimePartitioningType.DAY,
-            "field": field,
-            "expirationMs": str(year_ms),
-            "requirePartitionFilter": False,
-        }
-        self.assertEqual(config._properties["load"]["timePartitioning"], expected)
-
-    def test_time_partitioning_setter_w_none(self):
-        from google.cloud.bigquery.table import TimePartitioningType
-
-        field = "creation_date"
-        year_ms = 86400 * 1000 * 365
-        config = self._get_target_class()()
-        config._properties["load"]["timePartitioning"] = {
-            "type": TimePartitioningType.DAY,
-            "field": field,
-            "expirationMs": str(year_ms),
-            "requirePartitionFilter": False,
-        }
-        config.time_partitioning = None
-        self.assertIsNone(config.time_partitioning)
-        self.assertNotIn("timePartitioning", config._properties["load"])
-
-    def 
test_use_avro_logical_types(self): - config = self._get_target_class()() - self.assertIsNone(config.use_avro_logical_types) - - def test_use_avro_logical_types_setter(self): - config = self._get_target_class()() - config.use_avro_logical_types = True - self.assertTrue(config._properties["load"]["useAvroLogicalTypes"]) - - def test_write_disposition_missing(self): - config = self._get_target_class()() - self.assertIsNone(config.write_disposition) - - def test_write_disposition_hit(self): - from google.cloud.bigquery.job import WriteDisposition - - write_disposition = WriteDisposition.WRITE_TRUNCATE - config = self._get_target_class()() - config._properties["load"]["writeDisposition"] = write_disposition - self.assertEqual(config.write_disposition, write_disposition) - - def test_write_disposition_setter(self): - from google.cloud.bigquery.job import WriteDisposition - - write_disposition = WriteDisposition.WRITE_TRUNCATE - config = self._get_target_class()() - config.write_disposition = write_disposition - self.assertEqual( - config._properties["load"]["writeDisposition"], write_disposition - ) - - -class TestLoadJob(unittest.TestCase, _Base): - JOB_TYPE = "load" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import LoadJob - - return LoadJob - - def _setUpConstants(self): - super(TestLoadJob, self)._setUpConstants() - self.INPUT_FILES = 2 - self.INPUT_BYTES = 12345 - self.OUTPUT_BYTES = 23456 - self.OUTPUT_ROWS = 345 - - def _make_resource(self, started=False, ended=False): - resource = super(TestLoadJob, self)._make_resource(started, ended) - config = resource["configuration"]["load"] - config["sourceUris"] = [self.SOURCE1] - config["destinationTable"] = { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - } - - if ended: - resource["status"] = {"state": "DONE"} - resource["statistics"]["load"]["inputFiles"] = self.INPUT_FILES - resource["statistics"]["load"]["inputFileBytes"] = self.INPUT_BYTES - resource["statistics"]["load"]["outputBytes"] = self.OUTPUT_BYTES - resource["statistics"]["load"]["outputRows"] = self.OUTPUT_ROWS - - return resource - - def _verifyBooleanConfigProperties(self, job, config): - if "allowJaggedRows" in config: - self.assertEqual(job.allow_jagged_rows, config["allowJaggedRows"]) - else: - self.assertIsNone(job.allow_jagged_rows) - if "allowQuotedNewlines" in config: - self.assertEqual(job.allow_quoted_newlines, config["allowQuotedNewlines"]) - else: - self.assertIsNone(job.allow_quoted_newlines) - if "autodetect" in config: - self.assertEqual(job.autodetect, config["autodetect"]) - else: - self.assertIsNone(job.autodetect) - if "ignoreUnknownValues" in config: - self.assertEqual(job.ignore_unknown_values, config["ignoreUnknownValues"]) - else: - self.assertIsNone(job.ignore_unknown_values) - if "useAvroLogicalTypes" in config: - self.assertEqual(job.use_avro_logical_types, config["useAvroLogicalTypes"]) - else: - self.assertIsNone(job.use_avro_logical_types) - - def _verifyEnumConfigProperties(self, job, config): - if "createDisposition" in config: - self.assertEqual(job.create_disposition, config["createDisposition"]) - else: - self.assertIsNone(job.create_disposition) - if "encoding" in config: - self.assertEqual(job.encoding, config["encoding"]) - else: - self.assertIsNone(job.encoding) - if "sourceFormat" in config: - self.assertEqual(job.source_format, config["sourceFormat"]) - else: - self.assertIsNone(job.source_format) - if "writeDisposition" in config: - 
self.assertEqual(job.write_disposition, config["writeDisposition"]) - else: - self.assertIsNone(job.write_disposition) - if "schemaUpdateOptions" in config: - self.assertEqual(job.schema_update_options, config["schemaUpdateOptions"]) - else: - self.assertIsNone(job.schema_update_options) - - def _verifyResourceProperties(self, job, resource): - self._verifyReadonlyResourceProperties(job, resource) - - config = resource.get("configuration", {}).get("load") - - self._verifyBooleanConfigProperties(job, config) - self._verifyEnumConfigProperties(job, config) - - self.assertEqual(job.source_uris, config["sourceUris"]) - - table_ref = config["destinationTable"] - self.assertEqual(job.destination.project, table_ref["projectId"]) - self.assertEqual(job.destination.dataset_id, table_ref["datasetId"]) - self.assertEqual(job.destination.table_id, table_ref["tableId"]) - - if "fieldDelimiter" in config: - self.assertEqual(job.field_delimiter, config["fieldDelimiter"]) - else: - self.assertIsNone(job.field_delimiter) - if "maxBadRecords" in config: - self.assertEqual(job.max_bad_records, config["maxBadRecords"]) - else: - self.assertIsNone(job.max_bad_records) - if "nullMarker" in config: - self.assertEqual(job.null_marker, config["nullMarker"]) - else: - self.assertIsNone(job.null_marker) - if "quote" in config: - self.assertEqual(job.quote_character, config["quote"]) - else: - self.assertIsNone(job.quote_character) - if "skipLeadingRows" in config: - self.assertEqual(str(job.skip_leading_rows), config["skipLeadingRows"]) - else: - self.assertIsNone(job.skip_leading_rows) - - if "destinationEncryptionConfiguration" in config: - self.assertIsNotNone(job.destination_encryption_configuration) - self.assertEqual( - job.destination_encryption_configuration.kms_key_name, - config["destinationEncryptionConfiguration"]["kmsKeyName"], - ) - else: - self.assertIsNone(job.destination_encryption_configuration) - - def test_ctor(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - self.assertIs(job.destination, self.TABLE_REF) - self.assertEqual(list(job.source_uris), [self.SOURCE1]) - self.assertIs(job._client, client) - self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) - - self._verifyInitialReadonlyProperties(job) - - # derived from resource['statistics']['load'] - self.assertIsNone(job.input_file_bytes) - self.assertIsNone(job.input_files) - self.assertIsNone(job.output_bytes) - self.assertIsNone(job.output_rows) - - # set/read from resource['configuration']['load'] - self.assertIsNone(job.schema) - self.assertIsNone(job.allow_jagged_rows) - self.assertIsNone(job.allow_quoted_newlines) - self.assertIsNone(job.autodetect) - self.assertIsNone(job.create_disposition) - self.assertIsNone(job.encoding) - self.assertIsNone(job.field_delimiter) - self.assertIsNone(job.ignore_unknown_values) - self.assertIsNone(job.max_bad_records) - self.assertIsNone(job.null_marker) - self.assertIsNone(job.quote_character) - self.assertIsNone(job.skip_leading_rows) - self.assertIsNone(job.source_format) - self.assertIsNone(job.write_disposition) - self.assertIsNone(job.destination_encryption_configuration) - self.assertIsNone(job.destination_table_description) - self.assertIsNone(job.destination_table_friendly_name) - self.assertIsNone(job.range_partitioning) - self.assertIsNone(job.time_partitioning) - self.assertIsNone(job.use_avro_logical_types) - 
self.assertIsNone(job.clustering_fields) - self.assertIsNone(job.schema_update_options) - - def test_ctor_w_config(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.job import LoadJobConfig - - client = _make_client(project=self.PROJECT) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - config = LoadJobConfig() - config.schema = [full_name, age] - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client, config - ) - self.assertEqual(job.schema, [full_name, age]) - config.destination_table_description = "Description" - expected = {"description": "Description"} - self.assertEqual( - config._properties["load"]["destinationTableProperties"], expected - ) - friendly_name = "Friendly Name" - config._properties["load"]["destinationTableProperties"] = { - "friendlyName": friendly_name - } - self.assertEqual(config.destination_table_friendly_name, friendly_name) - - def test_ctor_w_job_reference(self): - from google.cloud.bigquery import job - - client = _make_client(project=self.PROJECT) - job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") - load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) - self.assertEqual(load_job.project, "alternative-project") - self.assertEqual(load_job.location, "US") - - def test_done(self): - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - self.assertTrue(job.done()) - - def test_result(self): - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - - result = job.result() - - self.assertIs(result, job) - - def test_result_invokes_begin(self): - begun_resource = self._make_resource() - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection(begun_resource, done_resource) - client = _make_client(self.PROJECT) - client._connection = connection - - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - job.result() - - self.assertEqual(len(connection.api_request.call_args_list), 2) - begin_request, reload_request = connection.api_request.call_args_list - self.assertEqual(begin_request[1]["method"], "POST") - self.assertEqual(reload_request[1]["method"], "GET") - - def test_schema_setter_non_list(self): - from google.cloud.bigquery.job import LoadJobConfig - - config = LoadJobConfig() - with self.assertRaises(TypeError): - config.schema = object() - - def test_schema_setter_invalid_field(self): - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.schema import SchemaField - - config = LoadJobConfig() - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - with self.assertRaises(ValueError): - config.schema = [full_name, object()] - - def test_schema_setter(self): - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.schema import SchemaField - - config = LoadJobConfig() - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - config.schema = [full_name, age] - self.assertEqual(config.schema, [full_name, age]) - - def test_props_set_by_server(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - - CREATED = 
datetime.datetime(2015, 8, 11, 12, 13, 22, tzinfo=UTC) - STARTED = datetime.datetime(2015, 8, 11, 13, 47, 15, tzinfo=UTC) - ENDED = datetime.datetime(2015, 8, 11, 14, 47, 15, tzinfo=UTC) - FULL_JOB_ID = "%s:%s" % (self.PROJECT, self.JOB_ID) - URL = "http://example.com/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - EMAIL = "phred@example.com" - ERROR_RESULT = { - "debugInfo": "DEBUG", - "location": "LOCATION", - "message": "MESSAGE", - "reason": "REASON", - } - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - job._properties["etag"] = "ETAG" - job._properties["id"] = FULL_JOB_ID - job._properties["selfLink"] = URL - job._properties["user_email"] = EMAIL - - statistics = job._properties["statistics"] = {} - statistics["creationTime"] = _millis(CREATED) - statistics["startTime"] = _millis(STARTED) - statistics["endTime"] = _millis(ENDED) - - self.assertEqual(job.etag, "ETAG") - self.assertEqual(job.self_link, URL) - self.assertEqual(job.user_email, EMAIL) - - self.assertEqual(job.created, CREATED) - self.assertEqual(job.started, STARTED) - self.assertEqual(job.ended, ENDED) - - # running jobs have no load stats set yet. - self.assertIsNone(job.output_bytes) - - load_stats = statistics["load"] = {} - load_stats["inputFileBytes"] = 12345 - load_stats["inputFiles"] = 1 - load_stats["outputBytes"] = 23456 - load_stats["outputRows"] = 345 - - self.assertEqual(job.input_file_bytes, 12345) - self.assertEqual(job.input_files, 1) - self.assertEqual(job.output_bytes, 23456) - self.assertEqual(job.output_rows, 345) - - status = job._properties["status"] = {} - - self.assertIsNone(job.error_result) - self.assertIsNone(job.errors) - self.assertIsNone(job.state) - - status["errorResult"] = ERROR_RESULT - status["errors"] = [ERROR_RESULT] - status["state"] = "STATE" - - self.assertEqual(job.error_result, ERROR_RESULT) - self.assertEqual(job.errors, [ERROR_RESULT]) - self.assertEqual(job.state, "STATE") - - def test_from_api_repr_missing_identity(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = {} - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_missing_config(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": "%s:%s" % (self.PROJECT, self.JOB_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - } - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_bare(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.FULL_JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "load": { - "sourceUris": [self.SOURCE1], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_with_encryption(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.FULL_JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "load": { - "sourceUris": [self.SOURCE1], - "destinationTable": { - "projectId":
self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "destinationEncryptionConfiguration": { - "kmsKeyName": self.KMS_KEY_NAME - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_w_properties(self): - from google.cloud.bigquery.job import CreateDisposition - - client = _make_client(project=self.PROJECT) - RESOURCE = self._make_resource() - load_config = RESOURCE["configuration"]["load"] - load_config["createDisposition"] = CreateDisposition.CREATE_IF_NEEDED - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_already_running(self): - conn = _make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - job._properties["status"] = {"state": "RUNNING"} - - with self.assertRaises(ValueError): - job._begin() - - def test_begin_w_bound_client(self): - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - - job._begin() - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/{}/jobs".format(self.PROJECT), - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "load": { - "sourceUris": [self.SOURCE1], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_autodetect(self): - from google.cloud.bigquery.job import LoadJobConfig - - path = "/projects/{}/jobs".format(self.PROJECT) - resource = self._make_resource() - resource["configuration"]["load"]["autodetect"] = True - # Ensure None for missing server-set props - del resource["statistics"]["creationTime"] - del resource["etag"] - del resource["selfLink"] - del resource["user_email"] - conn = _make_connection(resource) - client = _make_client(project=self.PROJECT, connection=conn) - config = LoadJobConfig() - config.autodetect = True - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client, config - ) - job._begin() - - sent = { - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "load": { - "sourceUris": [self.SOURCE1], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "autodetect": True, - } - }, - } - conn.api_request.assert_called_once_with( - method="POST", path=path, data=sent, timeout=None - ) - self._verifyResourceProperties(job, resource) - - def test_begin_w_alternate_client(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import LoadJobConfig - from google.cloud.bigquery.job import SchemaUpdateOption - from google.cloud.bigquery.job import WriteDisposition - from google.cloud.bigquery.schema import SchemaField - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = 
self._make_resource(ended=True) - LOAD_CONFIGURATION = { - "sourceUris": [self.SOURCE1], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - }, - "allowJaggedRows": True, - "allowQuotedNewlines": True, - "createDisposition": CreateDisposition.CREATE_NEVER, - "encoding": "ISO-8859-1", - "fieldDelimiter": "|", - "ignoreUnknownValues": True, - "maxBadRecords": 100, - "nullMarker": r"\N", - "quote": "'", - "skipLeadingRows": "1", - "sourceFormat": "CSV", - "useAvroLogicalTypes": True, - "writeDisposition": WriteDisposition.WRITE_TRUNCATE, - "schema": { - "fields": [ - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "age", - "type": "INTEGER", - "mode": "REQUIRED", - "description": None, - }, - ] - }, - "schemaUpdateOptions": [SchemaUpdateOption.ALLOW_FIELD_ADDITION], - } - RESOURCE["configuration"]["load"] = LOAD_CONFIGURATION - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - config = LoadJobConfig() - config.schema = [full_name, age] - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1, config - ) - config.allow_jagged_rows = True - config.allow_quoted_newlines = True - config.create_disposition = CreateDisposition.CREATE_NEVER - config.encoding = "ISO-8859-1" - config.field_delimiter = "|" - config.ignore_unknown_values = True - config.max_bad_records = 100 - config.null_marker = r"\N" - config.quote_character = "'" - config.skip_leading_rows = 1 - config.source_format = "CSV" - config.use_avro_logical_types = True - config.write_disposition = WriteDisposition.WRITE_TRUNCATE - config.schema_update_options = [SchemaUpdateOption.ALLOW_FIELD_ADDITION] - - job._begin(client=client2) - - conn1.api_request.assert_not_called() - self.assertEqual(len(conn2.api_request.call_args_list), 1) - req = conn2.api_request.call_args_list[0] - self.assertEqual(req[1]["method"], "POST") - self.assertEqual(req[1]["path"], PATH) - SENT = { - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": {"load": LOAD_CONFIGURATION}, - } - self.maxDiff = None - self.assertEqual(req[1]["data"], SENT) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_job_reference(self): - from google.cloud.bigquery import job - - resource = self._make_resource() - resource["jobReference"]["projectId"] = "alternative-project" - resource["jobReference"]["location"] = "US" - job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") - conn = _make_connection(resource) - client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) - - load_job._begin() - - conn.api_request.assert_called_once() - _, request = conn.api_request.call_args - self.assertEqual(request["method"], "POST") - self.assertEqual(request["path"], "/projects/alternative-project/jobs") - self.assertEqual( - request["data"]["jobReference"]["projectId"], "alternative-project" - ) - self.assertEqual(request["data"]["jobReference"]["location"], "US") - self.assertEqual(request["data"]["jobReference"]["jobId"], self.JOB_ID) - - def test_exists_miss_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn =
_make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - - self.assertFalse(job.exists()) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_exists_hit_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection({}) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) - - self.assertTrue(job.exists(client=client2)) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_exists_miss_w_job_reference(self): - from google.cloud.bigquery import job - - job_ref = job._JobReference("my-job-id", "other-project", "US") - conn = _make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) - - self.assertFalse(load_job.exists()) - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/other-project/jobs/my-job-id", - query_params={"fields": "id", "location": "US"}, - timeout=None, - ) - - def test_reload_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - - job.reload() - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) - - job.reload(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_job_reference(self): - from google.cloud.bigquery import job - - resource = self._make_resource(ended=True) - resource["jobReference"]["projectId"] = "alternative-project" - resource["jobReference"]["location"] = "US" - job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") - conn = _make_connection(resource) - client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) - - load_job.reload() - - conn.api_request.assert_called_once_with( - method="GET", - path="/projects/alternative-project/jobs/{}".format(self.JOB_ID), - query_params={"location": "US"}, - timeout=None, - ) - - def test_cancel_w_bound_client(self): - PATH = "/projects/%s/jobs/%s/cancel" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource(ended=True) - RESPONSE = {"job": RESOURCE} - conn = _make_connection(RESPONSE) - client = _make_client(project=self.PROJECT, 
connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - - job.cancel() - - conn.api_request.assert_called_once_with( - method="POST", path=PATH, query_params={}, timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_cancel_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s/cancel" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource(ended=True) - RESPONSE = {"job": RESOURCE} - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESPONSE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) - - job.cancel(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="POST", path=PATH, query_params={}, timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_cancel_w_job_reference(self): - from google.cloud.bigquery import job - - resource = self._make_resource(ended=True) - resource["jobReference"]["projectId"] = "alternative-project" - resource["jobReference"]["location"] = "US" - job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") - conn = _make_connection({"job": resource}) - client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) - - load_job.cancel() - - conn.api_request.assert_called_once_with( - method="POST", - path="/projects/alternative-project/jobs/{}/cancel".format(self.JOB_ID), - query_params={"location": "US"}, - timeout=None, - ) - - -class TestCopyJobConfig(unittest.TestCase, _Base): - JOB_TYPE = "copy" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import CopyJobConfig - - return CopyJobConfig - - def test_ctor_w_properties(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import WriteDisposition - - create_disposition = CreateDisposition.CREATE_NEVER - write_disposition = WriteDisposition.WRITE_TRUNCATE - config = self._get_target_class()( - create_disposition=create_disposition, write_disposition=write_disposition - ) - - self.assertEqual(config.create_disposition, create_disposition) - self.assertEqual(config.write_disposition, write_disposition) - - def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.encryption_configuration import ( - EncryptionConfiguration, - ) - - config = self._make_one() - config.destination_encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME - ) - resource = config.to_api_repr() - self.assertEqual( - resource, - { - "copy": { - "destinationEncryptionConfiguration": { - "kmsKeyName": self.KMS_KEY_NAME - } - } - }, - ) - - def test_to_api_repr_with_encryption_none(self): - config = self._make_one() - config.destination_encryption_configuration = None - resource = config.to_api_repr() - self.assertEqual( - resource, {"copy": {"destinationEncryptionConfiguration": None}} - ) - - -class TestCopyJob(unittest.TestCase, _Base): - JOB_TYPE = "copy" - SOURCE_TABLE = "source_table" - DESTINATION_TABLE = "destination_table" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import CopyJob - - return CopyJob - - def _make_resource(self, started=False, ended=False): - resource = super(TestCopyJob, self)._make_resource(started, ended) - config = resource["configuration"]["copy"] - config["sourceTables"] 
= [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - ] - config["destinationTable"] = { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - } - - return resource - - def _verifyResourceProperties(self, job, resource): - self._verifyReadonlyResourceProperties(job, resource) - - config = resource.get("configuration", {}).get("copy") - - table_ref = config["destinationTable"] - self.assertEqual(job.destination.project, table_ref["projectId"]) - self.assertEqual(job.destination.dataset_id, table_ref["datasetId"]) - self.assertEqual(job.destination.table_id, table_ref["tableId"]) - - sources = config.get("sourceTables") - if sources is None: - sources = [config["sourceTable"]] - self.assertEqual(len(sources), len(job.sources)) - for table_ref, table in zip(sources, job.sources): - self.assertEqual(table.project, table_ref["projectId"]) - self.assertEqual(table.dataset_id, table_ref["datasetId"]) - self.assertEqual(table.table_id, table_ref["tableId"]) - - if "createDisposition" in config: - self.assertEqual(job.create_disposition, config["createDisposition"]) - else: - self.assertIsNone(job.create_disposition) - - if "writeDisposition" in config: - self.assertEqual(job.write_disposition, config["writeDisposition"]) - else: - self.assertIsNone(job.write_disposition) - - if "destinationEncryptionConfiguration" in config: - self.assertIsNotNone(job.destination_encryption_configuration) - self.assertEqual( - job.destination_encryption_configuration.kms_key_name, - config["destinationEncryptionConfiguration"]["kmsKeyName"], - ) - else: - self.assertIsNone(job.destination_encryption_configuration) - - def test_ctor(self): - client = _make_client(project=self.PROJECT) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client) - self.assertIs(job.destination, destination) - self.assertEqual(job.sources, [source]) - self.assertIs(job._client, client) - self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) - - self._verifyInitialReadonlyProperties(job) - - # set/read from resource['configuration']['copy'] - self.assertIsNone(job.create_disposition) - self.assertIsNone(job.write_disposition) - self.assertIsNone(job.destination_encryption_configuration) - - def test_from_api_repr_missing_identity(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = {} - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_missing_config(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - } - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_bare(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": 
self.DESTINATION_TABLE, - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_with_encryption(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - }, - "destinationEncryptionConfiguration": { - "kmsKeyName": self.KMS_KEY_NAME - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_w_sourcetable(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "copy": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - }, - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_wo_sources(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "copy": { - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - } - } - }, - } - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_w_properties(self): - from google.cloud.bigquery.job import CreateDisposition - - client = _make_client(project=self.PROJECT) - RESOURCE = self._make_resource() - copy_config = RESOURCE["configuration"]["copy"] - copy_config["createDisposition"] = CreateDisposition.CREATE_IF_NEEDED - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_bound_client(self): - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client) - - job._begin() - - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "copy": { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - ], - "destinationTable": { 
- "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - }, - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_alternate_client(self): - from google.cloud.bigquery.job import CopyJobConfig - - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import WriteDisposition - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource(ended=True) - COPY_CONFIGURATION = { - "sourceTables": [ - { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - ], - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - }, - "createDisposition": CreateDisposition.CREATE_NEVER, - "writeDisposition": WriteDisposition.WRITE_TRUNCATE, - } - RESOURCE["configuration"]["copy"] = COPY_CONFIGURATION - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - config = CopyJobConfig() - config.create_disposition = CreateDisposition.CREATE_NEVER - config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job = self._make_one(self.JOB_ID, [source], destination, client1, config) - job._begin(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": {"copy": COPY_CONFIGURATION}, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_exists_miss_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn = _make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client) - - self.assertFalse(job.exists()) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None, - ) - - def test_exists_hit_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection({}) - client2 = _make_client(project=self.PROJECT, connection=conn2) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client1) - - self.assertTrue(job.exists(client=client2)) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_reload_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client) - - job.reload() - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - 
self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - source = self._table_ref(self.SOURCE_TABLE) - destination = self._table_ref(self.DESTINATION_TABLE) - job = self._make_one(self.JOB_ID, [source], destination, client1) - - job.reload(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - -class TestExtractJobConfig(unittest.TestCase, _Base): - JOB_TYPE = "extract" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import ExtractJobConfig - - return ExtractJobConfig - - def test_ctor_w_properties(self): - config = self._get_target_class()(field_delimiter="\t", print_header=True) - - self.assertEqual(config.field_delimiter, "\t") - self.assertTrue(config.print_header) - - def test_to_api_repr(self): - from google.cloud.bigquery import job - - config = self._make_one() - config.compression = job.Compression.SNAPPY - config.destination_format = job.DestinationFormat.AVRO - config.field_delimiter = "ignored for avro" - config.print_header = False - config._properties["extract"]["someNewField"] = "some-value" - config.use_avro_logical_types = True - resource = config.to_api_repr() - self.assertEqual( - resource, - { - "extract": { - "compression": "SNAPPY", - "destinationFormat": "AVRO", - "fieldDelimiter": "ignored for avro", - "printHeader": False, - "someNewField": "some-value", - "useAvroLogicalTypes": True, - } - }, - ) - - def test_from_api_repr(self): - cls = self._get_target_class() - config = cls.from_api_repr( - { - "extract": { - "compression": "NONE", - "destinationFormat": "CSV", - "fieldDelimiter": "\t", - "printHeader": True, - "someNewField": "some-value", - "useAvroLogicalTypes": False, - } - } - ) - self.assertEqual(config.compression, "NONE") - self.assertEqual(config.destination_format, "CSV") - self.assertEqual(config.field_delimiter, "\t") - self.assertEqual(config.print_header, True) - self.assertEqual(config._properties["extract"]["someNewField"], "some-value") - self.assertEqual(config.use_avro_logical_types, False) - - -class TestExtractJob(unittest.TestCase, _Base): - JOB_TYPE = "extract" - SOURCE_TABLE = "source_table" - DESTINATION_URI = "gs://bucket_name/object_name" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import ExtractJob - - return ExtractJob - - def _make_resource(self, started=False, ended=False): - resource = super(TestExtractJob, self)._make_resource(started, ended) - config = resource["configuration"]["extract"] - config["sourceTable"] = { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - } - config["destinationUris"] = [self.DESTINATION_URI] - return resource - - def _verifyResourceProperties(self, job, resource): - self._verifyReadonlyResourceProperties(job, resource) - - config = resource.get("configuration", {}).get("extract") - - self.assertEqual(job.destination_uris, config["destinationUris"]) - - table_ref = config["sourceTable"] - self.assertEqual(job.source.project, table_ref["projectId"]) - self.assertEqual(job.source.dataset_id, table_ref["datasetId"]) - 
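# A sketch of the export path covered by the extract-job tests in this hunk:
# ExtractJobConfig options map directly onto configuration.extract keys.
# Table IDs and URIs below are illustrative placeholders.
from google.cloud import bigquery

client = bigquery.Client()
job_config = bigquery.ExtractJobConfig(
    compression=bigquery.Compression.GZIP,
    destination_format=bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON,
    print_header=False,
)
extract_job = client.extract_table(
    "example-project.example_dataset.source_table",  # placeholder
    "gs://example-bucket/export-*.json.gz",  # placeholder wildcard URI
    job_config=job_config,
)
extract_job.result()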
self.assertEqual(job.source.table_id, table_ref["tableId"]) - - if "compression" in config: - self.assertEqual(job.compression, config["compression"]) - else: - self.assertIsNone(job.compression) - - if "destinationFormat" in config: - self.assertEqual(job.destination_format, config["destinationFormat"]) - else: - self.assertIsNone(job.destination_format) - - if "fieldDelimiter" in config: - self.assertEqual(job.field_delimiter, config["fieldDelimiter"]) - else: - self.assertIsNone(job.field_delimiter) - - if "printHeader" in config: - self.assertEqual(job.print_header, config["printHeader"]) - else: - self.assertIsNone(job.print_header) - - def test_ctor(self): - from google.cloud.bigquery.table import Table - - client = _make_client(project=self.PROJECT) - source = Table(self.TABLE_REF) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) - self.assertEqual(job.source.project, self.PROJECT) - self.assertEqual(job.source.dataset_id, self.DS_ID) - self.assertEqual(job.source.table_id, self.TABLE_ID) - self.assertEqual(job.destination_uris, [self.DESTINATION_URI]) - self.assertIs(job._client, client) - self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) - - self._verifyInitialReadonlyProperties(job) - - # set/read from resource['configuration']['extract'] - self.assertIsNone(job.compression) - self.assertIsNone(job.destination_format) - self.assertIsNone(job.field_delimiter) - self.assertIsNone(job.print_header) - - def test_destination_uri_file_counts(self): - file_counts = 23 - client = _make_client(project=self.PROJECT) - job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client - ) - self.assertIsNone(job.destination_uri_file_counts) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.destination_uri_file_counts) - - extract_stats = statistics["extract"] = {} - self.assertIsNone(job.destination_uri_file_counts) - - extract_stats["destinationUriFileCounts"] = [str(file_counts)] - self.assertEqual(job.destination_uri_file_counts, [file_counts]) - - def test_from_api_repr_missing_identity(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = {} - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_missing_config(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - } - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_bare(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - }, - "destinationUris": [self.DESTINATION_URI], - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_w_properties(self): - from google.cloud.bigquery.job import Compression - - client = _make_client(project=self.PROJECT) - RESOURCE = self._make_resource() - extract_config = 
RESOURCE["configuration"]["extract"] - extract_config["compression"] = Compression.GZIP - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_bound_client(self): - from google.cloud.bigquery.dataset import DatasetReference - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - source_dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) - - job._begin() - - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "extract": { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - }, - "destinationUris": [self.DESTINATION_URI], - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_alternate_client(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.job import Compression - from google.cloud.bigquery.job import DestinationFormat - from google.cloud.bigquery.job import ExtractJobConfig - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource(ended=True) - EXTRACT_CONFIGURATION = { - "sourceTable": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.SOURCE_TABLE, - }, - "destinationUris": [self.DESTINATION_URI], - "compression": Compression.GZIP, - "destinationFormat": DestinationFormat.NEWLINE_DELIMITED_JSON, - "fieldDelimiter": "|", - "printHeader": False, - } - RESOURCE["configuration"]["extract"] = EXTRACT_CONFIGURATION - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - source_dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = source_dataset.table(self.SOURCE_TABLE) - config = ExtractJobConfig() - config.compression = Compression.GZIP - config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON - config.field_delimiter = "|" - config.print_header = False - job = self._make_one( - self.JOB_ID, source, [self.DESTINATION_URI], client1, config - ) - - job._begin(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": {"extract": EXTRACT_CONFIGURATION}, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_exists_miss_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn = _make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client - ) - - self.assertFalse(job.exists()) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None, - ) - - def 
test_exists_hit_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection({}) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client1 - ) - - self.assertTrue(job.exists(client=client2)) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_reload_w_bound_client(self): - from google.cloud.bigquery.dataset import DatasetReference - - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - source_dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) - - job.reload() - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_alternate_client(self): - from google.cloud.bigquery.dataset import DatasetReference - - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - RESOURCE = self._make_resource() - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - source_dataset = DatasetReference(self.PROJECT, self.DS_ID) - source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client1) - - job.reload(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - -class TestQueryJobConfig(unittest.TestCase, _Base): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import QueryJobConfig - - return QueryJobConfig - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - config = self._make_one() - self.assertEqual(config._properties, {"query": {}}) - - def test_ctor_w_none(self): - config = self._make_one() - config.default_dataset = None - config.destination = None - self.assertIsNone(config.default_dataset) - self.assertIsNone(config.destination) - - def test_ctor_w_properties(self): - config = self._get_target_class()(use_query_cache=False, use_legacy_sql=True) - - self.assertFalse(config.use_query_cache) - self.assertTrue(config.use_legacy_sql) - - def test_ctor_w_string_default_dataset(self): - from google.cloud.bigquery import dataset - - default_dataset = "default-proj.default_dset" - config = self._get_target_class()(default_dataset=default_dataset) - expected = dataset.DatasetReference.from_string(default_dataset) - self.assertEqual(config.default_dataset, expected) - - def test_ctor_w_string_destination(self): - from google.cloud.bigquery import table - - destination = "dest-proj.dest_dset.dest_tbl" - config = self._get_target_class()(destination=destination) - expected = table.TableReference.from_string(destination) - self.assertEqual(config.destination, expected) - - def test_default_dataset_w_string(self): - from
google.cloud.bigquery import dataset - - default_dataset = "default-proj.default_dset" - config = self._make_one() - config.default_dataset = default_dataset - expected = dataset.DatasetReference.from_string(default_dataset) - self.assertEqual(config.default_dataset, expected) - - def test_default_dataset_w_dataset(self): - from google.cloud.bigquery import dataset - - default_dataset = "default-proj.default_dset" - expected = dataset.DatasetReference.from_string(default_dataset) - config = self._make_one() - config.default_dataset = dataset.Dataset(expected) - self.assertEqual(config.default_dataset, expected) - - def test_destination_w_string(self): - from google.cloud.bigquery import table - - destination = "dest-proj.dest_dset.dest_tbl" - config = self._make_one() - config.destination = destination - expected = table.TableReference.from_string(destination) - self.assertEqual(config.destination, expected) - - def test_range_partitioning_w_none(self): - object_under_test = self._get_target_class()() - assert object_under_test.range_partitioning is None - - def test_range_partitioning_w_value(self): - object_under_test = self._get_target_class()() - object_under_test._properties["query"]["rangePartitioning"] = { - "field": "column_one", - "range": {"start": 1, "end": 1000, "interval": 10}, - } - assert object_under_test.range_partitioning.field == "column_one" - assert object_under_test.range_partitioning.range_.start == 1 - assert object_under_test.range_partitioning.range_.end == 1000 - assert object_under_test.range_partitioning.range_.interval == 10 - - def test_range_partitioning_setter(self): - from google.cloud.bigquery.table import PartitionRange - from google.cloud.bigquery.table import RangePartitioning - - object_under_test = self._get_target_class()() - object_under_test.range_partitioning = RangePartitioning( - field="column_one", range_=PartitionRange(start=1, end=1000, interval=10) - ) - assert object_under_test.range_partitioning.field == "column_one" - assert object_under_test.range_partitioning.range_.start == 1 - assert object_under_test.range_partitioning.range_.end == 1000 - assert object_under_test.range_partitioning.range_.interval == 10 - - def test_range_partitioning_setter_w_none(self): - object_under_test = self._get_target_class()() - object_under_test.range_partitioning = None - assert object_under_test.range_partitioning is None - - def test_range_partitioning_setter_w_wrong_type(self): - object_under_test = self._get_target_class()() - with pytest.raises(ValueError, match="RangePartitioning"): - object_under_test.range_partitioning = object() - - def test_time_partitioning(self): - from google.cloud.bigquery import table - - time_partitioning = table.TimePartitioning( - type_=table.TimePartitioningType.DAY, field="name" - ) - config = self._make_one() - config.time_partitioning = time_partitioning - # TimePartitioning should be configurable after assigning - time_partitioning.expiration_ms = 10000 - - self.assertEqual(config.time_partitioning.type_, table.TimePartitioningType.DAY) - self.assertEqual(config.time_partitioning.field, "name") - self.assertEqual(config.time_partitioning.expiration_ms, 10000) - - config.time_partitioning = None - self.assertIsNone(config.time_partitioning) - - def test_clustering_fields(self): - fields = ["email", "postal_code"] - config = self._get_target_class()() - config.clustering_fields = fields - self.assertEqual(config.clustering_fields, fields) - - config.clustering_fields = None - self.assertIsNone(config.clustering_fields) - - def test_from_api_repr_empty(self): - klass =
self._get_target_class() - config = klass.from_api_repr({}) - self.assertIsNone(config.dry_run) - self.assertIsNone(config.use_legacy_sql) - self.assertIsNone(config.default_dataset) - self.assertIsNone(config.destination) - self.assertIsNone(config.destination_encryption_configuration) - - def test_from_api_repr_normal(self): - from google.cloud.bigquery.dataset import DatasetReference - - resource = { - "query": { - "useLegacySql": True, - "query": "no property for me", - "defaultDataset": { - "projectId": "someproject", - "datasetId": "somedataset", - }, - "someNewProperty": "I should be saved, too.", - }, - "dryRun": True, - } - klass = self._get_target_class() - - config = klass.from_api_repr(resource) - - self.assertTrue(config.use_legacy_sql) - self.assertEqual( - config.default_dataset, DatasetReference("someproject", "somedataset") - ) - self.assertTrue(config.dry_run) - # Make sure unknown properties propagate. - self.assertEqual(config._properties["query"]["query"], "no property for me") - self.assertEqual( - config._properties["query"]["someNewProperty"], "I should be saved, too." - ) - - def test_to_api_repr_normal(self): - from google.cloud.bigquery.dataset import DatasetReference - - config = self._make_one() - config.use_legacy_sql = True - config.default_dataset = DatasetReference("someproject", "somedataset") - config.dry_run = False - config._properties["someNewProperty"] = "Woohoo, alpha stuff." - - resource = config.to_api_repr() - - self.assertFalse(resource["dryRun"]) - self.assertTrue(resource["query"]["useLegacySql"]) - self.assertEqual( - resource["query"]["defaultDataset"]["projectId"], "someproject" - ) - self.assertEqual( - resource["query"]["defaultDataset"]["datasetId"], "somedataset" - ) - # Make sure unknown properties propagate. 
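# A minimal standalone sketch of that round-trip, assuming only the public
# from_api_repr()/to_api_repr() pair: keys QueryJobConfig does not model are
# carried through untouched.
from google.cloud.bigquery.job import QueryJobConfig

resource = {"query": {"useLegacySql": False, "someNewProperty": "kept"}}
config = QueryJobConfig.from_api_repr(resource)
assert config.use_legacy_sql is False
assert config.to_api_repr()["query"]["someNewProperty"] == "kept"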
- self.assertEqual(resource["someNewProperty"], "Woohoo, alpha stuff.") - - def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.encryption_configuration import ( - EncryptionConfiguration, - ) - - config = self._make_one() - config.destination_encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME - ) - resource = config.to_api_repr() - self.assertEqual( - resource, - { - "query": { - "destinationEncryptionConfiguration": { - "kmsKeyName": self.KMS_KEY_NAME - } - } - }, - ) - - def test_to_api_repr_with_encryption_none(self): - config = self._make_one() - config.destination_encryption_configuration = None - resource = config.to_api_repr() - self.assertEqual( - resource, {"query": {"destinationEncryptionConfiguration": None}} - ) - - def test_from_api_repr_with_encryption(self): - resource = { - "query": { - "destinationEncryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME} - } - } - klass = self._get_target_class() - config = klass.from_api_repr(resource) - self.assertEqual( - config.destination_encryption_configuration.kms_key_name, self.KMS_KEY_NAME - ) - - -class TestQueryJob(unittest.TestCase, _Base): - JOB_TYPE = "query" - QUERY = "select count(*) from persons" - DESTINATION_TABLE = "destination_table" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import QueryJob - - return QueryJob - - def _make_resource(self, started=False, ended=False): - resource = super(TestQueryJob, self)._make_resource(started, ended) - config = resource["configuration"]["query"] - config["query"] = self.QUERY - - if ended: - resource["status"] = {"state": "DONE"} - - return resource - - def _verifyBooleanResourceProperties(self, job, config): - - if "allowLargeResults" in config: - self.assertEqual(job.allow_large_results, config["allowLargeResults"]) - else: - self.assertIsNone(job.allow_large_results) - if "flattenResults" in config: - self.assertEqual(job.flatten_results, config["flattenResults"]) - else: - self.assertIsNone(job.flatten_results) - if "useQueryCache" in config: - self.assertEqual(job.use_query_cache, config["useQueryCache"]) - else: - self.assertIsNone(job.use_query_cache) - if "useLegacySql" in config: - self.assertEqual(job.use_legacy_sql, config["useLegacySql"]) - else: - self.assertIsNone(job.use_legacy_sql) - - def _verifyIntegerResourceProperties(self, job, config): - if "maximumBillingTier" in config: - self.assertEqual(job.maximum_billing_tier, config["maximumBillingTier"]) - else: - self.assertIsNone(job.maximum_billing_tier) - if "maximumBytesBilled" in config: - self.assertEqual( - str(job.maximum_bytes_billed), config["maximumBytesBilled"] - ) - self.assertIsInstance(job.maximum_bytes_billed, int) - else: - self.assertIsNone(job.maximum_bytes_billed) - - def _verify_udf_resources(self, job, config): - udf_resources = config.get("userDefinedFunctionResources", ()) - self.assertEqual(len(job.udf_resources), len(udf_resources)) - for found, expected in zip(job.udf_resources, udf_resources): - if "resourceUri" in expected: - self.assertEqual(found.udf_type, "resourceUri") - self.assertEqual(found.value, expected["resourceUri"]) - else: - self.assertEqual(found.udf_type, "inlineCode") - self.assertEqual(found.value, expected["inlineCode"]) - - def _verifyQueryParameters(self, job, config): - query_parameters = config.get("queryParameters", ()) - self.assertEqual(len(job.query_parameters), len(query_parameters)) - for found, expected in zip(job.query_parameters, query_parameters): - 
self.assertEqual(found.to_api_repr(), expected) - - def _verify_table_definitions(self, job, config): - table_defs = config.get("tableDefinitions") - if job.table_definitions is None: - self.assertIsNone(table_defs) - else: - self.assertEqual(len(job.table_definitions), len(table_defs)) - for found_key, found_ec in job.table_definitions.items(): - expected_ec = table_defs.get(found_key) - self.assertIsNotNone(expected_ec) - self.assertEqual(found_ec.to_api_repr(), expected_ec) - - def _verify_configuration_properties(self, job, configuration): - if "dryRun" in configuration: - self.assertEqual(job.dry_run, configuration["dryRun"]) - else: - self.assertIsNone(job.dry_run) - - def _verifyResourceProperties(self, job, resource): - self._verifyReadonlyResourceProperties(job, resource) - - configuration = resource.get("configuration", {}) - self._verify_configuration_properties(job, configuration) - - query_config = resource.get("configuration", {}).get("query") - self._verifyBooleanResourceProperties(job, query_config) - self._verifyIntegerResourceProperties(job, query_config) - self._verify_udf_resources(job, query_config) - self._verifyQueryParameters(job, query_config) - self._verify_table_definitions(job, query_config) - - self.assertEqual(job.query, query_config["query"]) - if "createDisposition" in query_config: - self.assertEqual(job.create_disposition, query_config["createDisposition"]) - else: - self.assertIsNone(job.create_disposition) - if "defaultDataset" in query_config: - ds_ref = job.default_dataset - ds_ref = {"projectId": ds_ref.project, "datasetId": ds_ref.dataset_id} - self.assertEqual(ds_ref, query_config["defaultDataset"]) - else: - self.assertIsNone(job.default_dataset) - if "destinationTable" in query_config: - table = job.destination - tb_ref = { - "projectId": table.project, - "datasetId": table.dataset_id, - "tableId": table.table_id, - } - self.assertEqual(tb_ref, query_config["destinationTable"]) - else: - self.assertIsNone(job.destination) - if "priority" in query_config: - self.assertEqual(job.priority, query_config["priority"]) - else: - self.assertIsNone(job.priority) - if "writeDisposition" in query_config: - self.assertEqual(job.write_disposition, query_config["writeDisposition"]) - else: - self.assertIsNone(job.write_disposition) - if "destinationEncryptionConfiguration" in query_config: - self.assertIsNotNone(job.destination_encryption_configuration) - self.assertEqual( - job.destination_encryption_configuration.kms_key_name, - query_config["destinationEncryptionConfiguration"]["kmsKeyName"], - ) - else: - self.assertIsNone(job.destination_encryption_configuration) - if "schemaUpdateOptions" in query_config: - self.assertEqual( - job.schema_update_options, query_config["schemaUpdateOptions"] - ) - else: - self.assertIsNone(job.schema_update_options) - - def test_ctor_defaults(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertEqual(job.query, self.QUERY) - self.assertIs(job._client, client) - self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) - - self._verifyInitialReadonlyProperties(job) - - self.assertFalse(job.use_legacy_sql) - - # set/read from resource['configuration']['query'] - self.assertIsNone(job.allow_large_results) - self.assertIsNone(job.create_disposition) - self.assertIsNone(job.default_dataset) - self.assertIsNone(job.destination) - self.assertIsNone(job.flatten_results) - 
self.assertIsNone(job.priority) - self.assertIsNone(job.use_query_cache) - self.assertIsNone(job.dry_run) - self.assertIsNone(job.write_disposition) - self.assertIsNone(job.maximum_billing_tier) - self.assertIsNone(job.maximum_bytes_billed) - self.assertIsNone(job.table_definitions) - self.assertIsNone(job.destination_encryption_configuration) - self.assertIsNone(job.range_partitioning) - self.assertIsNone(job.time_partitioning) - self.assertIsNone(job.clustering_fields) - self.assertIsNone(job.schema_update_options) - - def test_ctor_w_udf_resources(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import UDFResource - - RESOURCE_URI = "gs://some-bucket/js/lib.js" - udf_resources = [UDFResource("resourceUri", RESOURCE_URI)] - client = _make_client(project=self.PROJECT) - config = QueryJobConfig() - config.udf_resources = udf_resources - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - self.assertEqual(job.udf_resources, udf_resources) - - def test_ctor_w_query_parameters(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import ScalarQueryParameter - - query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] - client = _make_client(project=self.PROJECT) - config = QueryJobConfig(query_parameters=query_parameters) - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - self.assertEqual(job.query_parameters, query_parameters) - - def test_from_api_repr_missing_identity(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = {} - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_missing_config(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": "%s:%s" % (self.PROJECT, self.DS_ID), - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - } - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, client=client) - - def test_from_api_repr_bare(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": {"query": {"query": self.QUERY}}, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_with_encryption(self): - self._setUpConstants() - client = _make_client(project=self.PROJECT) - RESOURCE = { - "id": self.JOB_ID, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "destinationEncryptionConfiguration": { - "kmsKeyName": self.KMS_KEY_NAME - }, - } - }, - } - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_from_api_repr_w_properties(self): - from google.cloud.bigquery.job import CreateDisposition - from google.cloud.bigquery.job import SchemaUpdateOption - from google.cloud.bigquery.job import WriteDisposition - - client = _make_client(project=self.PROJECT) - RESOURCE = self._make_resource() - query_config = RESOURCE["configuration"]["query"] - query_config["createDisposition"] = CreateDisposition.CREATE_IF_NEEDED - 
query_config["writeDisposition"] = WriteDisposition.WRITE_TRUNCATE - query_config["destinationTable"] = { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.DESTINATION_TABLE, - } - query_config["schemaUpdateOptions"] = [SchemaUpdateOption.ALLOW_FIELD_ADDITION] - klass = self._get_target_class() - job = klass.from_api_repr(RESOURCE, client=client) - self.assertIs(job._client, client) - self._verifyResourceProperties(job, RESOURCE) - - def test_cancelled(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - job._properties["status"] = { - "state": "DONE", - "errorResult": {"reason": "stopped"}, - } - - self.assertTrue(job.cancelled()) - - def test_done(self): - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - self.assertTrue(job.done()) - - def test_done_w_timeout(self): - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=False) - job = self._get_target_class().from_api_repr(resource, client) - - with mock.patch.object( - client, "_get_query_results" - ) as fake_get_results, mock.patch.object(job, "reload") as fake_reload: - job.done(timeout=42) - - fake_get_results.assert_called_once() - call_args = fake_get_results.call_args - self.assertEqual(call_args.kwargs.get("timeout"), 42) - - call_args = fake_reload.call_args - self.assertEqual(call_args.kwargs.get("timeout"), 42) - - def test_done_w_timeout_and_shorter_internal_api_timeout(self): - from google.cloud.bigquery.job import _TIMEOUT_BUFFER_SECS - from google.cloud.bigquery.job import _SERVER_TIMEOUT_MARGIN_SECS - - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=False) - job = self._get_target_class().from_api_repr(resource, client) - job._done_timeout = 8.8 - - with mock.patch.object( - client, "_get_query_results" - ) as fake_get_results, mock.patch.object(job, "reload") as fake_reload: - job.done(timeout=42) - - # The expected timeout used is the job's own done_timeout minus a - # fixed amount (bigquery.job._TIMEOUT_BUFFER_SECS) increased by the - # safety margin on top of server-side processing timeout - that's - # because that final number is smaller than the given timeout (42 seconds). - expected_timeout = 8.8 - _TIMEOUT_BUFFER_SECS + _SERVER_TIMEOUT_MARGIN_SECS - - fake_get_results.assert_called_once() - call_args = fake_get_results.call_args - self.assertAlmostEqual(call_args.kwargs.get("timeout"), expected_timeout) - - call_args = fake_reload.call_args - self.assertAlmostEqual(call_args.kwargs.get("timeout"), expected_timeout) - - def test_done_w_timeout_and_longer_internal_api_timeout(self): - client = _make_client(project=self.PROJECT) - resource = self._make_resource(ended=False) - job = self._get_target_class().from_api_repr(resource, client) - job._done_timeout = 8.8 - - with mock.patch.object( - client, "_get_query_results" - ) as fake_get_results, mock.patch.object(job, "reload") as fake_reload: - job.done(timeout=5.5) - - # The expected timeout used is simply the given timeout, as the latter - # is shorter than the job's internal done timeout. 
- expected_timeout = 5.5 - - fake_get_results.assert_called_once() - call_args = fake_get_results.call_args - self.assertAlmostEqual(call_args.kwargs.get("timeout"), expected_timeout) - - call_args = fake_reload.call_args - self.assertAlmostEqual(call_args.kwargs.get("timeout"), expected_timeout) - - def test_query_plan(self): - from google.cloud._helpers import _RFC3339_MICROS - from google.cloud.bigquery.job import QueryPlanEntry - from google.cloud.bigquery.job import QueryPlanEntryStep - - plan_entries = [ - { - "name": "NAME", - "id": "1234", - "inputStages": ["88", "101"], - "startMs": "1522540800000", - "endMs": "1522540804000", - "parallelInputs": "1000", - "completedParallelInputs": "5", - "waitMsAvg": "33", - "waitMsMax": "400", - "waitRatioAvg": 2.71828, - "waitRatioMax": 3.14159, - "readMsAvg": "45", - "readMsMax": "90", - "readRatioAvg": 1.41421, - "readRatioMax": 1.73205, - "computeMsAvg": "55", - "computeMsMax": "99", - "computeRatioAvg": 0.69315, - "computeRatioMax": 1.09861, - "writeMsAvg": "203", - "writeMsMax": "340", - "writeRatioAvg": 3.32193, - "writeRatioMax": 2.30258, - "recordsRead": "100", - "recordsWritten": "1", - "status": "STATUS", - "shuffleOutputBytes": "1024", - "shuffleOutputBytesSpilled": "1", - "steps": [{"kind": "KIND", "substeps": ["SUBSTEP1", "SUBSTEP2"]}], - } - ] - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertEqual(job.query_plan, []) - - statistics = job._properties["statistics"] = {} - self.assertEqual(job.query_plan, []) - - query_stats = statistics["query"] = {} - self.assertEqual(job.query_plan, []) - - query_stats["queryPlan"] = plan_entries - - self.assertEqual(len(job.query_plan), len(plan_entries)) - for found, expected in zip(job.query_plan, plan_entries): - self.assertIsInstance(found, QueryPlanEntry) - self.assertEqual(found.name, expected["name"]) - self.assertEqual(found.entry_id, expected["id"]) - self.assertEqual(len(found.input_stages), len(expected["inputStages"])) - for f_id in found.input_stages: - self.assertIn(f_id, [int(e) for e in expected["inputStages"]]) - self.assertEqual( - found.start.strftime(_RFC3339_MICROS), "2018-04-01T00:00:00.000000Z" - ) - self.assertEqual( - found.end.strftime(_RFC3339_MICROS), "2018-04-01T00:00:04.000000Z" - ) - self.assertEqual(found.parallel_inputs, int(expected["parallelInputs"])) - self.assertEqual( - found.completed_parallel_inputs, - int(expected["completedParallelInputs"]), - ) - self.assertEqual(found.wait_ms_avg, int(expected["waitMsAvg"])) - self.assertEqual(found.wait_ms_max, int(expected["waitMsMax"])) - self.assertEqual(found.wait_ratio_avg, expected["waitRatioAvg"]) - self.assertEqual(found.wait_ratio_max, expected["waitRatioMax"]) - self.assertEqual(found.read_ms_avg, int(expected["readMsAvg"])) - self.assertEqual(found.read_ms_max, int(expected["readMsMax"])) - self.assertEqual(found.read_ratio_avg, expected["readRatioAvg"]) - self.assertEqual(found.read_ratio_max, expected["readRatioMax"]) - self.assertEqual(found.compute_ms_avg, int(expected["computeMsAvg"])) - self.assertEqual(found.compute_ms_max, int(expected["computeMsMax"])) - self.assertEqual(found.compute_ratio_avg, expected["computeRatioAvg"]) - self.assertEqual(found.compute_ratio_max, expected["computeRatioMax"]) - self.assertEqual(found.write_ms_avg, int(expected["writeMsAvg"])) - self.assertEqual(found.write_ms_max, int(expected["writeMsMax"])) - self.assertEqual(found.write_ratio_avg, expected["writeRatioAvg"]) - 
self.assertEqual(found.write_ratio_max, expected["writeRatioMax"]) - self.assertEqual(found.records_read, int(expected["recordsRead"])) - self.assertEqual(found.records_written, int(expected["recordsWritten"])) - self.assertEqual(found.status, expected["status"]) - self.assertEqual( - found.shuffle_output_bytes, int(expected["shuffleOutputBytes"]) - ) - self.assertEqual( - found.shuffle_output_bytes_spilled, - int(expected["shuffleOutputBytesSpilled"]), - ) - - self.assertEqual(len(found.steps), len(expected["steps"])) - for f_step, e_step in zip(found.steps, expected["steps"]): - self.assertIsInstance(f_step, QueryPlanEntryStep) - self.assertEqual(f_step.kind, e_step["kind"]) - self.assertEqual(f_step.substeps, e_step["substeps"]) - - def test_total_bytes_processed(self): - total_bytes = 1234 - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.total_bytes_processed) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.total_bytes_processed) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.total_bytes_processed) - - query_stats["totalBytesProcessed"] = str(total_bytes) - self.assertEqual(job.total_bytes_processed, total_bytes) - - def test_total_bytes_billed(self): - total_bytes = 1234 - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.total_bytes_billed) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.total_bytes_billed) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.total_bytes_billed) - - query_stats["totalBytesBilled"] = str(total_bytes) - self.assertEqual(job.total_bytes_billed, total_bytes) - - def test_billing_tier(self): - billing_tier = 1 - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.billing_tier) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.billing_tier) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.billing_tier) - - query_stats["billingTier"] = billing_tier - self.assertEqual(job.billing_tier, billing_tier) - - def test_cache_hit(self): - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.cache_hit) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.cache_hit) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.cache_hit) - - query_stats["cacheHit"] = True - self.assertTrue(job.cache_hit) - - def test_ddl_operation_performed(self): - op = "SKIP" - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.ddl_operation_performed) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.ddl_operation_performed) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.ddl_operation_performed) - - query_stats["ddlOperationPerformed"] = op - self.assertEqual(job.ddl_operation_performed, op) - - def test_ddl_target_routine(self): - from google.cloud.bigquery.routine import RoutineReference - - ref_routine = { - "projectId": self.PROJECT, - "datasetId": "ddl_ds", - "routineId": "targetroutine", - } - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.ddl_target_routine) - - statistics = job._properties["statistics"] = {} - 
self.assertIsNone(job.ddl_target_routine) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.ddl_target_routine) - - query_stats["ddlTargetRoutine"] = ref_routine - self.assertIsInstance(job.ddl_target_routine, RoutineReference) - self.assertEqual(job.ddl_target_routine.routine_id, "targetroutine") - self.assertEqual(job.ddl_target_routine.dataset_id, "ddl_ds") - self.assertEqual(job.ddl_target_routine.project, self.PROJECT) - - def test_ddl_target_table(self): - from google.cloud.bigquery.table import TableReference - - ref_table = { - "projectId": self.PROJECT, - "datasetId": "ddl_ds", - "tableId": "targettable", - } - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.ddl_target_table) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.ddl_target_table) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.ddl_target_table) - - query_stats["ddlTargetTable"] = ref_table - self.assertIsInstance(job.ddl_target_table, TableReference) - self.assertEqual(job.ddl_target_table.table_id, "targettable") - self.assertEqual(job.ddl_target_table.dataset_id, "ddl_ds") - self.assertEqual(job.ddl_target_table.project, self.PROJECT) - - def test_num_dml_affected_rows(self): - num_rows = 1234 - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.num_dml_affected_rows) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.num_dml_affected_rows) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.num_dml_affected_rows) - - query_stats["numDmlAffectedRows"] = str(num_rows) - self.assertEqual(job.num_dml_affected_rows, num_rows) - - def test_slot_millis(self): - millis = 1234 - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.slot_millis) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.slot_millis) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.slot_millis) - - query_stats["totalSlotMs"] = millis - self.assertEqual(job.slot_millis, millis) - - def test_statement_type(self): - statement_type = "SELECT" - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.statement_type) - - statistics = job._properties["statistics"] = {} - self.assertIsNone(job.statement_type) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.statement_type) - - query_stats["statementType"] = statement_type - self.assertEqual(job.statement_type, statement_type) - - def test_referenced_tables(self): - from google.cloud.bigquery.table import TableReference - - ref_tables_resource = [ - {"projectId": self.PROJECT, "datasetId": "dataset", "tableId": "local1"}, - {"projectId": self.PROJECT, "datasetId": "dataset", "tableId": "local2"}, - { - "projectId": "other-project-123", - "datasetId": "other-dataset", - "tableId": "other-table", - }, - ] - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertEqual(job.referenced_tables, []) - - statistics = job._properties["statistics"] = {} - self.assertEqual(job.referenced_tables, []) - - query_stats = statistics["query"] = {} - self.assertEqual(job.referenced_tables, []) - - query_stats["referencedTables"] = ref_tables_resource - - local1, local2, remote = job.referenced_tables - - 
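# referenced_tables is expected to preserve the order of the - # "referencedTables" resource list, so the unpacking above yields the - # two local tables first and the remote table last. -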
self.assertIsInstance(local1, TableReference) - self.assertEqual(local1.table_id, "local1") - self.assertEqual(local1.dataset_id, "dataset") - self.assertEqual(local1.project, self.PROJECT) - - self.assertIsInstance(local2, TableReference) - self.assertEqual(local2.table_id, "local2") - self.assertEqual(local2.dataset_id, "dataset") - self.assertEqual(local2.project, self.PROJECT) - - self.assertIsInstance(remote, TableReference) - self.assertEqual(remote.table_id, "other-table") - self.assertEqual(remote.dataset_id, "other-dataset") - self.assertEqual(remote.project, "other-project-123") - - def test_timeline(self): - timeline_resource = [ - { - "elapsedMs": 1, - "activeUnits": 22, - "pendingUnits": 33, - "completedUnits": 44, - "totalSlotMs": 101, - } - ] - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertEqual(job.timeline, []) - - statistics = job._properties["statistics"] = {} - self.assertEqual(job.timeline, []) - - query_stats = statistics["query"] = {} - self.assertEqual(job.timeline, []) - - query_stats["timeline"] = timeline_resource - - self.assertEqual(len(job.timeline), len(timeline_resource)) - self.assertEqual(job.timeline[0].elapsed_ms, 1) - self.assertEqual(job.timeline[0].active_units, 22) - self.assertEqual(job.timeline[0].pending_units, 33) - self.assertEqual(job.timeline[0].completed_units, 44) - self.assertEqual(job.timeline[0].slot_millis, 101) - - def test_undeclared_query_parameters(self): - from google.cloud.bigquery.query import ArrayQueryParameter - from google.cloud.bigquery.query import ScalarQueryParameter - from google.cloud.bigquery.query import StructQueryParameter - - undeclared = [ - { - "name": "my_scalar", - "parameterType": {"type": "STRING"}, - "parameterValue": {"value": "value"}, - }, - { - "name": "my_array", - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": { - "arrayValues": [{"value": "1066"}, {"value": "1745"}] - }, - }, - { - "name": "my_struct", - "parameterType": { - "type": "STRUCT", - "structTypes": [{"name": "count", "type": {"type": "INT64"}}], - }, - "parameterValue": {"structValues": {"count": {"value": "123"}}}, - }, - ] - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertEqual(job.undeclared_query_parameters, []) - - statistics = job._properties["statistics"] = {} - self.assertEqual(job.undeclared_query_parameters, []) - - query_stats = statistics["query"] = {} - self.assertEqual(job.undeclared_query_parameters, []) - - query_stats["undeclaredQueryParameters"] = undeclared - - scalar, array, struct = job.undeclared_query_parameters - - self.assertIsInstance(scalar, ScalarQueryParameter) - self.assertEqual(scalar.name, "my_scalar") - self.assertEqual(scalar.type_, "STRING") - self.assertEqual(scalar.value, "value") - - self.assertIsInstance(array, ArrayQueryParameter) - self.assertEqual(array.name, "my_array") - self.assertEqual(array.array_type, "INT64") - self.assertEqual(array.values, [1066, 1745]) - - self.assertIsInstance(struct, StructQueryParameter) - self.assertEqual(struct.name, "my_struct") - self.assertEqual(struct.struct_types, {"count": "INT64"}) - self.assertEqual(struct.struct_values, {"count": 123}) - - def test_estimated_bytes_processed(self): - est_bytes = 123456 - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, self.QUERY, client) - self.assertIsNone(job.estimated_bytes_processed) - - statistics = 
job._properties["statistics"] = {} - self.assertIsNone(job.estimated_bytes_processed) - - query_stats = statistics["query"] = {} - self.assertIsNone(job.estimated_bytes_processed) - - query_stats["estimatedBytesProcessed"] = str(est_bytes) - self.assertEqual(job.estimated_bytes_processed, est_bytes) - - def test_result(self): - from google.cloud.bigquery.table import RowIterator - - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - "totalRows": "2", - } - tabledata_resource = { - # Explicitly set totalRows to be different from the query response. - # to test update during iteration. - "totalRows": "1", - "pageToken": None, - "rows": [{"f": [{"v": "abc"}]}], - } - connection = _make_connection(query_resource, tabledata_resource) - client = _make_client(self.PROJECT, connection=connection) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - - result = job.result() - - self.assertIsInstance(result, RowIterator) - self.assertEqual(result.total_rows, 2) - - rows = list(result) - self.assertEqual(len(rows), 1) - self.assertEqual(rows[0].col1, "abc") - # Test that the total_rows property has changed during iteration, based - # on the response from tabledata.list. - self.assertEqual(result.total_rows, 1) - - def test_result_with_max_results(self): - from google.cloud.bigquery.table import RowIterator - - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - "totalRows": "5", - } - tabledata_resource = { - "totalRows": "5", - "pageToken": None, - "rows": [ - {"f": [{"v": "abc"}]}, - {"f": [{"v": "def"}]}, - {"f": [{"v": "ghi"}]}, - ], - } - connection = _make_connection(query_resource, tabledata_resource) - client = _make_client(self.PROJECT, connection=connection) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - - max_results = 3 - - result = job.result(max_results=max_results) - - self.assertIsInstance(result, RowIterator) - self.assertEqual(result.total_rows, 5) - - rows = list(result) - - self.assertEqual(len(rows), 3) - self.assertEqual(len(connection.api_request.call_args_list), 2) - tabledata_list_request = connection.api_request.call_args_list[1] - self.assertEqual( - tabledata_list_request[1]["query_params"]["maxResults"], max_results - ) - - def test_result_w_empty_schema(self): - from google.cloud.bigquery.table import _EmptyRowIterator - - # Destination table may have no schema for some DDL and DML queries. 
- query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": []}, - } - connection = _make_connection(query_resource, query_resource) - client = _make_client(self.PROJECT, connection=connection) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - - result = job.result() - - self.assertIsInstance(result, _EmptyRowIterator) - self.assertEqual(list(result), []) - - def test_result_invokes_begins(self): - begun_resource = self._make_resource() - incomplete_resource = { - "jobComplete": False, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - } - query_resource = copy.deepcopy(incomplete_resource) - query_resource["jobComplete"] = True - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection( - begun_resource, - incomplete_resource, - query_resource, - done_resource, - query_resource, - ) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - job.result() - - self.assertEqual(len(connection.api_request.call_args_list), 4) - begin_request = connection.api_request.call_args_list[0] - query_request = connection.api_request.call_args_list[2] - reload_request = connection.api_request.call_args_list[3] - self.assertEqual(begin_request[1]["method"], "POST") - self.assertEqual(query_request[1]["method"], "GET") - self.assertEqual(reload_request[1]["method"], "GET") - - def test_result_w_timeout(self): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection(begun_resource, query_resource, done_resource) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - with freezegun.freeze_time("1970-01-01 00:00:00", tick=False): - job.result(timeout=1.0) - - self.assertEqual(len(connection.api_request.call_args_list), 3) - begin_request = connection.api_request.call_args_list[0] - query_request = connection.api_request.call_args_list[1] - reload_request = connection.api_request.call_args_list[2] - self.assertEqual(begin_request[1]["method"], "POST") - self.assertEqual(query_request[1]["method"], "GET") - self.assertEqual( - query_request[1]["path"], - "/projects/{}/queries/{}".format(self.PROJECT, self.JOB_ID), - ) - self.assertEqual(query_request[1]["query_params"]["timeoutMs"], 900) - self.assertEqual(reload_request[1]["method"], "GET") - - @mock.patch("google.api_core.future.polling.PollingFuture.result") - def test_result_splitting_timeout_between_requests(self, polling_result): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - "totalRows": "5", - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - - connection = _make_connection(begun_resource, query_resource, done_resource) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID,
self.QUERY, client) - - client.list_rows = mock.Mock() - - with freezegun.freeze_time("1970-01-01 00:00:00", tick=False) as frozen_time: - - def delayed_result(*args, **kwargs): - frozen_time.tick(delta=0.8) - - polling_result.side_effect = delayed_result - - def delayed_get_results(*args, **kwargs): - frozen_time.tick(delta=0.5) - return orig_get_results(*args, **kwargs) - - orig_get_results = client._get_query_results - client._get_query_results = mock.Mock(side_effect=delayed_get_results) - job.result(timeout=2.0) - - polling_result.assert_called_once_with(timeout=2.0) - - client._get_query_results.assert_called_once() - _, kwargs = client._get_query_results.call_args - self.assertAlmostEqual(kwargs.get("timeout"), 1.2) - - client.list_rows.assert_called_once() - _, kwargs = client.list_rows.call_args - self.assertAlmostEqual(kwargs.get("timeout"), 0.7) - - def test_result_w_page_size(self): - # Arrange - query_results_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - "totalRows": "4", - } - job_resource = self._make_resource(started=True, ended=True) - q_config = job_resource["configuration"]["query"] - q_config["destinationTable"] = { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_ID, - } - tabledata_resource = { - "totalRows": 4, - "pageToken": "some-page-token", - "rows": [ - {"f": [{"v": "row1"}]}, - {"f": [{"v": "row2"}]}, - {"f": [{"v": "row3"}]}, - ], - } - tabledata_resource_page_2 = {"totalRows": 4, "rows": [{"f": [{"v": "row4"}]}]} - conn = _make_connection( - query_results_resource, tabledata_resource, tabledata_resource_page_2 - ) - client = _make_client(self.PROJECT, connection=conn) - job = self._get_target_class().from_api_repr(job_resource, client) - - # Act - result = job.result(page_size=3) - - # Assert - actual_rows = list(result) - self.assertEqual(len(actual_rows), 4) - - tabledata_path = "/projects/%s/datasets/%s/tables/%s/data" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_ID, - ) - conn.api_request.assert_has_calls( - [ - mock.call( - method="GET", - path=tabledata_path, - query_params={"maxResults": 3}, - timeout=None, - ), - mock.call( - method="GET", - path=tabledata_path, - query_params={"pageToken": "some-page-token", "maxResults": 3}, - timeout=None, - ), - ] - ) - - def test_result_error(self): - from google.cloud import exceptions - - query = textwrap.dedent( - """ - SELECT foo, bar - FROM table_baz - WHERE foo == bar""" - ) - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, query, client) - error_result = { - "debugInfo": "DEBUG", - "location": "LOCATION", - "message": "MESSAGE", - "reason": "invalid", - } - job._properties["status"] = { - "errorResult": error_result, - "errors": [error_result], - "state": "DONE", - } - job._set_future_result() - - with self.assertRaises(exceptions.GoogleCloudError) as exc_info: - job.result() - - self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) - self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - - exc_job_instance = getattr(exc_info.exception, "query_job", None) - self.assertIs(exc_job_instance, job) - - full_text = str(exc_info.exception) - assert job.job_id in full_text - assert "Query Job SQL Follows" in full_text - - for i, line in enumerate(query.splitlines(), start=1): - expected_line = "{}:{}".format(i, line) - assert expected_line in full_text - - def 
test_result_transport_timeout_error(self): - query = textwrap.dedent( - """ - SELECT foo, bar - FROM table_baz - WHERE foo == bar""" - ) - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, query, client) - call_api_patch = mock.patch( - "google.cloud.bigquery.client.Client._call_api", - autospec=True, - side_effect=requests.exceptions.Timeout("Server response took too long."), - ) - - # Make sure that timeout errors get rebranded to concurrent futures timeout. - with call_api_patch, self.assertRaises(concurrent.futures.TimeoutError): - job.result(timeout=1) - - def test__begin_error(self): - from google.cloud import exceptions - - query = textwrap.dedent( - """ - SELECT foo, bar - FROM table_baz - WHERE foo == bar""" - ) - - client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, query, client) - call_api_patch = mock.patch( - "google.cloud.bigquery.client.Client._call_api", - autospec=True, - side_effect=exceptions.BadRequest("Syntax error in SQL query"), - ) - - with call_api_patch, self.assertRaises(exceptions.GoogleCloudError) as exc_info: - job.result() - - self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) - self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - - exc_job_instance = getattr(exc_info.exception, "query_job", None) - self.assertIs(exc_job_instance, job) - - full_text = str(exc_info.exception) - assert job.job_id in full_text - assert "Query Job SQL Follows" in full_text - - for i, line in enumerate(query.splitlines(), start=1): - expected_line = "{}:{}".format(i, line) - assert expected_line in full_text - - def test__begin_w_timeout(self): - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - job._begin(timeout=7.5) - - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": {"query": self.QUERY, "useLegacySql": False} - }, - }, - timeout=7.5, - ) - - def test_begin_w_bound_client(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.job import QueryJobConfig - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - DS_ID = "DATASET" - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - - config = QueryJobConfig() - config.default_dataset = DatasetReference(self.PROJECT, DS_ID) - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - - job._begin() - - self.assertIsNone(job.default_dataset) - self.assertEqual(job.udf_resources, []) - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "useLegacySql": False, - "defaultDataset": { - "projectId": self.PROJECT, - "datasetId": DS_ID, - }, - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_alternate_client(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.job import 
CreateDisposition - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.job import QueryPriority - from google.cloud.bigquery.job import SchemaUpdateOption - from google.cloud.bigquery.job import WriteDisposition - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - TABLE = "TABLE" - DS_ID = "DATASET" - RESOURCE = self._make_resource(ended=True) - QUERY_CONFIGURATION = { - "query": self.QUERY, - "allowLargeResults": True, - "createDisposition": CreateDisposition.CREATE_NEVER, - "defaultDataset": {"projectId": self.PROJECT, "datasetId": DS_ID}, - "destinationTable": { - "projectId": self.PROJECT, - "datasetId": DS_ID, - "tableId": TABLE, - }, - "flattenResults": True, - "priority": QueryPriority.INTERACTIVE, - "useQueryCache": True, - "useLegacySql": True, - "writeDisposition": WriteDisposition.WRITE_TRUNCATE, - "maximumBillingTier": 4, - "maximumBytesBilled": "123456", - "schemaUpdateOptions": [SchemaUpdateOption.ALLOW_FIELD_RELAXATION], - } - RESOURCE["configuration"]["query"] = QUERY_CONFIGURATION - RESOURCE["configuration"]["dryRun"] = True - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - dataset_ref = DatasetReference(self.PROJECT, DS_ID) - table_ref = dataset_ref.table(TABLE) - - config = QueryJobConfig() - config.allow_large_results = True - config.create_disposition = CreateDisposition.CREATE_NEVER - config.default_dataset = dataset_ref - config.destination = table_ref - config.dry_run = True - config.flatten_results = True - config.maximum_billing_tier = 4 - config.priority = QueryPriority.INTERACTIVE - config.use_legacy_sql = True - config.use_query_cache = True - config.write_disposition = WriteDisposition.WRITE_TRUNCATE - config.maximum_bytes_billed = 123456 - config.schema_update_options = [SchemaUpdateOption.ALLOW_FIELD_RELAXATION] - job = self._make_one(self.JOB_ID, self.QUERY, client1, job_config=config) - - job._begin(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": {"dryRun": True, "query": QUERY_CONFIGURATION}, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_udf(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import UDFResource - - RESOURCE_URI = "gs://some-bucket/js/lib.js" - INLINE_UDF_CODE = 'var someCode = "here";' - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - RESOURCE["configuration"]["query"]["userDefinedFunctionResources"] = [ - {"resourceUri": RESOURCE_URI}, - {"inlineCode": INLINE_UDF_CODE}, - ] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - udf_resources = [ - UDFResource("resourceUri", RESOURCE_URI), - UDFResource("inlineCode", INLINE_UDF_CODE), - ] - config = QueryJobConfig() - config.udf_resources = udf_resources - config.use_legacy_sql = True - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - - job._begin() - - self.assertEqual(job.udf_resources, udf_resources) - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, 
- data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "useLegacySql": True, - "userDefinedFunctionResources": [ - {"resourceUri": RESOURCE_URI}, - {"inlineCode": INLINE_UDF_CODE}, - ], - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_named_query_parameter(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import ScalarQueryParameter - - query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - config = RESOURCE["configuration"]["query"] - config["parameterMode"] = "NAMED" - config["queryParameters"] = [ - { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - ] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - jconfig = QueryJobConfig() - jconfig.query_parameters = query_parameters - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=jconfig) - - job._begin() - - self.assertEqual(job.query_parameters, query_parameters) - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "useLegacySql": False, - "parameterMode": "NAMED", - "queryParameters": config["queryParameters"], - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_positional_query_parameter(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.query import ScalarQueryParameter - - query_parameters = [ScalarQueryParameter.positional("INT64", 123)] - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - config = RESOURCE["configuration"]["query"] - config["parameterMode"] = "POSITIONAL" - config["queryParameters"] = [ - {"parameterType": {"type": "INT64"}, "parameterValue": {"value": "123"}} - ] - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - jconfig = QueryJobConfig() - jconfig.query_parameters = query_parameters - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=jconfig) - - job._begin() - - self.assertEqual(job.query_parameters, query_parameters) - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "useLegacySql": False, - "parameterMode": "POSITIONAL", - "queryParameters": config["queryParameters"], - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_begin_w_table_defs(self): - from google.cloud.bigquery.job import QueryJobConfig - from google.cloud.bigquery.external_config import ExternalConfig - from google.cloud.bigquery.external_config import BigtableColumn - from google.cloud.bigquery.external_config import BigtableColumnFamily - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE 
= self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - - bt_config = ExternalConfig("BIGTABLE") - bt_config.ignore_unknown_values = True - bt_config.options.read_rowkey_as_string = True - cf = BigtableColumnFamily() - cf.family_id = "cf" - col = BigtableColumn() - col.field_name = "fn" - cf.columns = [col] - bt_config.options.column_families = [cf] - BT_CONFIG_RESOURCE = { - "sourceFormat": "BIGTABLE", - "ignoreUnknownValues": True, - "bigtableOptions": { - "readRowkeyAsString": True, - "columnFamilies": [ - {"familyId": "cf", "columns": [{"fieldName": "fn"}]} - ], - }, - } - CSV_CONFIG_RESOURCE = { - "sourceFormat": "CSV", - "maxBadRecords": 8, - "csvOptions": {"allowJaggedRows": True}, - } - csv_config = ExternalConfig("CSV") - csv_config.max_bad_records = 8 - csv_config.options.allow_jagged_rows = True - bt_table = "bigtable-table" - csv_table = "csv-table" - RESOURCE["configuration"]["query"]["tableDefinitions"] = { - bt_table: BT_CONFIG_RESOURCE, - csv_table: CSV_CONFIG_RESOURCE, - } - want_resource = copy.deepcopy(RESOURCE) - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - config = QueryJobConfig() - config.table_definitions = {bt_table: bt_config, csv_table: csv_config} - config.use_legacy_sql = True - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - - job._begin() - - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": { - "query": self.QUERY, - "useLegacySql": True, - "tableDefinitions": { - bt_table: BT_CONFIG_RESOURCE, - csv_table: CSV_CONFIG_RESOURCE, - }, - } - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, want_resource) - - def test_dry_run_query(self): - from google.cloud.bigquery.job import QueryJobConfig - - PATH = "/projects/%s/jobs" % (self.PROJECT,) - RESOURCE = self._make_resource() - # Ensure None for missing server-set props - del RESOURCE["statistics"]["creationTime"] - del RESOURCE["etag"] - del RESOURCE["selfLink"] - del RESOURCE["user_email"] - RESOURCE["configuration"]["dryRun"] = True - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - config = QueryJobConfig() - config.dry_run = True - job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) - - job._begin() - self.assertEqual(job.udf_resources, []) - conn.api_request.assert_called_once_with( - method="POST", - path=PATH, - data={ - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "configuration": { - "query": {"query": self.QUERY, "useLegacySql": False}, - "dryRun": True, - }, - }, - timeout=None, - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_exists_miss_w_bound_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn = _make_connection() - client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - self.assertFalse(job.exists()) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_exists_hit_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = 
_make_connection({}) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one(self.JOB_ID, self.QUERY, client1) - - self.assertTrue(job.exists(client=client2)) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={"fields": "id"}, timeout=None - ) - - def test_reload_w_bound_client(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.job import QueryJobConfig - - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - DS_ID = "DATASET" - DEST_TABLE = "dest_table" - RESOURCE = self._make_resource() - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - dataset_ref = DatasetReference(self.PROJECT, DS_ID) - table_ref = dataset_ref.table(DEST_TABLE) - config = QueryJobConfig() - config.destination = table_ref - job = self._make_one(self.JOB_ID, None, client, job_config=config) - - job.reload() - - self.assertNotEqual(job.destination, table_ref) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_alternate_client(self): - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - DS_ID = "DATASET" - DEST_TABLE = "dest_table" - RESOURCE = self._make_resource() - q_config = RESOURCE["configuration"]["query"] - q_config["destinationTable"] = { - "projectId": self.PROJECT, - "datasetId": DS_ID, - "tableId": DEST_TABLE, - } - conn1 = _make_connection() - client1 = _make_client(project=self.PROJECT, connection=conn1) - conn2 = _make_connection(RESOURCE) - client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one(self.JOB_ID, self.QUERY, client1) - - job.reload(client=client2) - - conn1.api_request.assert_not_called() - conn2.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=None - ) - self._verifyResourceProperties(job, RESOURCE) - - def test_reload_w_timeout(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.job import QueryJobConfig - - PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) - DS_ID = "DATASET" - DEST_TABLE = "dest_table" - RESOURCE = self._make_resource() - conn = _make_connection(RESOURCE) - client = _make_client(project=self.PROJECT, connection=conn) - dataset_ref = DatasetReference(self.PROJECT, DS_ID) - table_ref = dataset_ref.table(DEST_TABLE) - config = QueryJobConfig() - config.destination = table_ref - job = self._make_one(self.JOB_ID, None, client, job_config=config) - - job.reload(timeout=4.2) - - self.assertNotEqual(job.destination, table_ref) - - conn.api_request.assert_called_once_with( - method="GET", path=PATH, query_params={}, timeout=4.2 - ) - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow(self): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "4", - "schema": { - "fields": [ - { - "name": "spouse_1", - "type": "RECORD", - "fields": [ - {"name": "name", "type": "STRING", "mode": "NULLABLE"}, - {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, - ], - }, - { - "name": "spouse_2", - "type": "RECORD", - "fields": [ - {"name": "name", "type": "STRING", "mode": "NULLABLE"}, - {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, - ], - }, - ] - }, - } - tabledata_resource = { 
- "rows": [ - { - "f": [ - {"v": {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}}, - {"v": {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}}, - ] - }, - { - "f": [ - {"v": {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}}, - {"v": {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}}, - ] - }, - ] - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection( - begun_resource, query_resource, done_resource, tabledata_resource - ) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - tbl = job.to_arrow() - - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 2) - - # Check the schema. - self.assertEqual(tbl.schema[0].name, "spouse_1") - self.assertEqual(tbl.schema[0].type[0].name, "name") - self.assertEqual(tbl.schema[0].type[1].name, "age") - self.assertTrue(pyarrow.types.is_struct(tbl.schema[0].type)) - self.assertTrue(pyarrow.types.is_string(tbl.schema[0].type[0].type)) - self.assertTrue(pyarrow.types.is_int64(tbl.schema[0].type[1].type)) - self.assertEqual(tbl.schema[1].name, "spouse_2") - self.assertEqual(tbl.schema[1].type[0].name, "name") - self.assertEqual(tbl.schema[1].type[1].name, "age") - self.assertTrue(pyarrow.types.is_struct(tbl.schema[1].type)) - self.assertTrue(pyarrow.types.is_string(tbl.schema[1].type[0].type)) - self.assertTrue(pyarrow.types.is_int64(tbl.schema[1].type[1].type)) - - # Check the data. - tbl_data = tbl.to_pydict() - spouse_1 = tbl_data["spouse_1"] - self.assertEqual( - spouse_1, - [ - {"name": "Phred Phlyntstone", "age": 32}, - {"name": "Bhettye Rhubble", "age": 27}, - ], - ) - spouse_2 = tbl_data["spouse_2"] - self.assertEqual( - spouse_2, - [ - {"name": "Wylma Phlyntstone", "age": 29}, - {"name": "Bharney Rhubble", "age": 33}, - ], - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe(self): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "4", - "schema": { - "fields": [ - {"name": "name", "type": "STRING", "mode": "NULLABLE"}, - {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, - ] - }, - } - tabledata_resource = { - "rows": [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection( - begun_resource, query_resource, done_resource, tabledata_resource - ) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - df = job.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 4) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_ddl_query(self): - # Destination table may have no schema for some DDL and DML queries. 
- query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "schema": {"fields": []}, - } - connection = _make_connection(query_resource) - client = _make_client(self.PROJECT, connection=connection) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - - df = job.to_dataframe() - - self.assertEqual(len(df), 0) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_bqstorage(self): - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "4", - "schema": { - "fields": [ - {"name": "name", "type": "STRING", "mode": "NULLABLE"}, - {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, - ] - }, - } - connection = _make_connection(query_resource) - client = _make_client(self.PROJECT, connection=connection) - resource = self._make_resource(ended=True) - job = self._get_target_class().from_api_repr(resource, client) - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession() - session.avro_schema.schema = json.dumps( - { - "type": "record", - "name": "__root__", - "fields": [ - {"name": "name", "type": ["null", "string"]}, - {"name": "age", "type": ["null", "long"]}, - ], - } - ) - bqstorage_client.create_read_session.return_value = session - - job.to_dataframe(bqstorage_client=bqstorage_client) - - bqstorage_client.create_read_session.assert_called_once_with( - mock.ANY, - "projects/{}".format(self.PROJECT), - format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW, - read_options=mock.ANY, - # Use default number of streams for best performance. 
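- # (A requested_streams value of 0 lets the BigQuery Storage API decide - # how many streams to use.)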
- requested_streams=0, - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_column_dtypes(self): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "4", - "schema": { - "fields": [ - {"name": "start_timestamp", "type": "TIMESTAMP"}, - {"name": "seconds", "type": "INT64"}, - {"name": "miles", "type": "FLOAT64"}, - {"name": "km", "type": "FLOAT64"}, - {"name": "payment_type", "type": "STRING"}, - {"name": "complete", "type": "BOOL"}, - {"name": "date", "type": "DATE"}, - ] - }, - } - row_data = [ - ["1.4338368E9", "420", "1.1", "1.77", "Cash", "true", "1999-12-01"], - ["1.3878117E9", "2580", "17.7", "28.5", "Cash", "false", "1953-06-14"], - ["1.3855653E9", "2280", "4.4", "7.1", "Credit", "true", "1981-11-04"], - ] - rows = [{"f": [{"v": field} for field in row]} for row in row_data] - query_resource["rows"] = rows - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection( - begun_resource, query_resource, done_resource, query_resource - ) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - df = job.to_dataframe(dtypes={"km": "float16"}) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 3) # verify the number of rows - exp_columns = [field["name"] for field in query_resource["schema"]["fields"]] - self.assertEqual(list(df), exp_columns) # verify the column names - - self.assertEqual(df.start_timestamp.dtype.name, "datetime64[ns, UTC]") - self.assertEqual(df.seconds.dtype.name, "int64") - self.assertEqual(df.miles.dtype.name, "float64") - self.assertEqual(df.km.dtype.name, "float16") - self.assertEqual(df.payment_type.dtype.name, "object") - self.assertEqual(df.complete.dtype.name, "bool") - self.assertEqual(df.date.dtype.name, "object") - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_with_progress_bar(self, tqdm_mock): - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "4", - "schema": { - "fields": [{"name": "name", "type": "STRING", "mode": "NULLABLE"}] - }, - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection( - begun_resource, - query_resource, - done_resource, - query_resource, - query_resource, - ) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - job.to_dataframe(progress_bar_type=None) - tqdm_mock.assert_not_called() - - job.to_dataframe(progress_bar_type="tqdm") - tqdm_mock.assert_called() - - def test_iter(self): - import types - - begun_resource = self._make_resource() - query_resource = { - "jobComplete": True, - "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, - "totalRows": "0", - "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, - } - done_resource = copy.deepcopy(begun_resource) - done_resource["status"] = {"state": "DONE"} - connection = _make_connection(begun_resource, query_resource, done_resource) - client = _make_client(project=self.PROJECT, connection=connection) - job = self._make_one(self.JOB_ID, self.QUERY, client) - - 
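# Iterating a job is expected to delegate to job.result() and yield - # the query's result rows, hence the generator check below. -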
self.assertIsInstance(iter(job), types.GeneratorType) - - -class TestQueryPlanEntryStep(unittest.TestCase, _Base): - KIND = "KIND" - SUBSTEPS = ("SUB1", "SUB2") - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import QueryPlanEntryStep - - return QueryPlanEntryStep - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - self.assertEqual(step.kind, self.KIND) - self.assertEqual(step.substeps, list(self.SUBSTEPS)) - - def test_from_api_repr_empty(self): - klass = self._get_target_class() - step = klass.from_api_repr({}) - self.assertIsNone(step.kind) - self.assertEqual(step.substeps, []) - - def test_from_api_repr_normal(self): - resource = {"kind": self.KIND, "substeps": self.SUBSTEPS} - klass = self._get_target_class() - step = klass.from_api_repr(resource) - self.assertEqual(step.kind, self.KIND) - self.assertEqual(step.substeps, list(self.SUBSTEPS)) - - def test___eq___mismatched_type(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - self.assertNotEqual(step, object()) - - def test___eq___mismatch_kind(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - other = self._make_one("OTHER", self.SUBSTEPS) - self.assertNotEqual(step, other) - - def test___eq___mismatch_substeps(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - other = self._make_one(self.KIND, ()) - self.assertNotEqual(step, other) - - def test___eq___hit(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - other = self._make_one(self.KIND, self.SUBSTEPS) - self.assertEqual(step, other) - - def test___eq___wrong_type(self): - step = self._make_one(self.KIND, self.SUBSTEPS) - self.assertFalse(step == "hello") - - -class TestQueryPlanEntry(unittest.TestCase, _Base): - NAME = "NAME" - ENTRY_ID = 1234 - START_MS = 1522540800000 - END_MS = 1522540804000 - INPUT_STAGES = (88, 101) - PARALLEL_INPUTS = 1000 - COMPLETED_PARALLEL_INPUTS = 5 - WAIT_MS_AVG = 33 - WAIT_MS_MAX = 400 - WAIT_RATIO_AVG = 2.71828 - WAIT_RATIO_MAX = 3.14159 - READ_MS_AVG = 45 - READ_MS_MAX = 90 - READ_RATIO_AVG = 1.41421 - READ_RATIO_MAX = 1.73205 - COMPUTE_MS_AVG = 55 - COMPUTE_MS_MAX = 99 - COMPUTE_RATIO_AVG = 0.69315 - COMPUTE_RATIO_MAX = 1.09861 - WRITE_MS_AVG = 203 - WRITE_MS_MAX = 340 - WRITE_RATIO_AVG = 3.32193 - WRITE_RATIO_MAX = 2.30258 - RECORDS_READ = 100 - RECORDS_WRITTEN = 1 - STATUS = "STATUS" - SHUFFLE_OUTPUT_BYTES = 1024 - SHUFFLE_OUTPUT_BYTES_SPILLED = 1 - - START_RFC3339_MICROS = "2018-04-01T00:00:00.000000Z" - END_RFC3339_MICROS = "2018-04-01T00:00:04.000000Z" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import QueryPlanEntry - - return QueryPlanEntry - - def test_from_api_repr_empty(self): - klass = self._get_target_class() - - entry = klass.from_api_repr({}) - - self.assertIsNone(entry.name) - self.assertIsNone(entry.entry_id) - self.assertEqual(entry.input_stages, []) - self.assertIsNone(entry.start) - self.assertIsNone(entry.end) - self.assertIsNone(entry.parallel_inputs) - self.assertIsNone(entry.completed_parallel_inputs) - self.assertIsNone(entry.wait_ms_avg) - self.assertIsNone(entry.wait_ms_max) - self.assertIsNone(entry.wait_ratio_avg) - self.assertIsNone(entry.wait_ratio_max) - self.assertIsNone(entry.read_ms_avg) - self.assertIsNone(entry.read_ms_max) - self.assertIsNone(entry.read_ratio_avg) - self.assertIsNone(entry.read_ratio_max) - self.assertIsNone(entry.compute_ms_avg) - self.assertIsNone(entry.compute_ms_max) - 
self.assertIsNone(entry.compute_ratio_avg) - self.assertIsNone(entry.compute_ratio_max) - self.assertIsNone(entry.write_ms_avg) - self.assertIsNone(entry.write_ms_max) - self.assertIsNone(entry.write_ratio_avg) - self.assertIsNone(entry.write_ratio_max) - self.assertIsNone(entry.records_read) - self.assertIsNone(entry.records_written) - self.assertIsNone(entry.status) - self.assertIsNone(entry.shuffle_output_bytes) - self.assertIsNone(entry.shuffle_output_bytes_spilled) - self.assertEqual(entry.steps, []) - - def test_from_api_repr_normal(self): - from google.cloud.bigquery.job import QueryPlanEntryStep - - steps = [ - QueryPlanEntryStep( - kind=TestQueryPlanEntryStep.KIND, - substeps=TestQueryPlanEntryStep.SUBSTEPS, - ) - ] - resource = { - "name": self.NAME, - "id": self.ENTRY_ID, - "inputStages": self.INPUT_STAGES, - "startMs": self.START_MS, - "endMs": self.END_MS, - "waitMsAvg": self.WAIT_MS_AVG, - "waitMsMax": self.WAIT_MS_MAX, - "waitRatioAvg": self.WAIT_RATIO_AVG, - "waitRatioMax": self.WAIT_RATIO_MAX, - "readMsAvg": self.READ_MS_AVG, - "readMsMax": self.READ_MS_MAX, - "readRatioAvg": self.READ_RATIO_AVG, - "readRatioMax": self.READ_RATIO_MAX, - "computeMsAvg": self.COMPUTE_MS_AVG, - "computeMsMax": self.COMPUTE_MS_MAX, - "computeRatioAvg": self.COMPUTE_RATIO_AVG, - "computeRatioMax": self.COMPUTE_RATIO_MAX, - "writeMsAvg": self.WRITE_MS_AVG, - "writeMsMax": self.WRITE_MS_MAX, - "writeRatioAvg": self.WRITE_RATIO_AVG, - "writeRatioMax": self.WRITE_RATIO_MAX, - "recordsRead": self.RECORDS_READ, - "recordsWritten": self.RECORDS_WRITTEN, - "status": self.STATUS, - "shuffleOutputBytes": self.SHUFFLE_OUTPUT_BYTES, - "shuffleOutputBytesSpilled": self.SHUFFLE_OUTPUT_BYTES_SPILLED, - "steps": [ - { - "kind": TestQueryPlanEntryStep.KIND, - "substeps": TestQueryPlanEntryStep.SUBSTEPS, - } - ], - } - klass = self._get_target_class() - - entry = klass.from_api_repr(resource) - self.assertEqual(entry.name, self.NAME) - self.assertEqual(entry.entry_id, self.ENTRY_ID) - self.assertEqual(entry.wait_ratio_avg, self.WAIT_RATIO_AVG) - self.assertEqual(entry.wait_ratio_max, self.WAIT_RATIO_MAX) - self.assertEqual(entry.read_ratio_avg, self.READ_RATIO_AVG) - self.assertEqual(entry.read_ratio_max, self.READ_RATIO_MAX) - self.assertEqual(entry.compute_ratio_avg, self.COMPUTE_RATIO_AVG) - self.assertEqual(entry.compute_ratio_max, self.COMPUTE_RATIO_MAX) - self.assertEqual(entry.write_ratio_avg, self.WRITE_RATIO_AVG) - self.assertEqual(entry.write_ratio_max, self.WRITE_RATIO_MAX) - self.assertEqual(entry.records_read, self.RECORDS_READ) - self.assertEqual(entry.records_written, self.RECORDS_WRITTEN) - self.assertEqual(entry.status, self.STATUS) - self.assertEqual(entry.steps, steps) - - def test_start(self): - from google.cloud._helpers import _RFC3339_MICROS - - klass = self._get_target_class() - - entry = klass.from_api_repr({}) - self.assertEqual(entry.start, None) - - entry._properties["startMs"] = self.START_MS - self.assertEqual( - entry.start.strftime(_RFC3339_MICROS), self.START_RFC3339_MICROS - ) - - def test_end(self): - from google.cloud._helpers import _RFC3339_MICROS - - klass = self._get_target_class() - - entry = klass.from_api_repr({}) - self.assertEqual(entry.end, None) - - entry._properties["endMs"] = self.END_MS - self.assertEqual(entry.end.strftime(_RFC3339_MICROS), self.END_RFC3339_MICROS) - - -class TestScriptStackFrame(unittest.TestCase, _Base): - def _make_one(self, resource): - from google.cloud.bigquery.job import ScriptStackFrame - - return ScriptStackFrame(resource) - - def 
test_procedure_id(self): - frame = self._make_one({"procedureId": "some-procedure"}) - self.assertEqual(frame.procedure_id, "some-procedure") - del frame._properties["procedureId"] - self.assertIsNone(frame.procedure_id) - - def test_start_line(self): - frame = self._make_one({"startLine": 5}) - self.assertEqual(frame.start_line, 5) - frame._properties["startLine"] = "5" - self.assertEqual(frame.start_line, 5) - - def test_start_column(self): - frame = self._make_one({"startColumn": 29}) - self.assertEqual(frame.start_column, 29) - frame._properties["startColumn"] = "29" - self.assertEqual(frame.start_column, 29) - - def test_end_line(self): - frame = self._make_one({"endLine": 9}) - self.assertEqual(frame.end_line, 9) - frame._properties["endLine"] = "9" - self.assertEqual(frame.end_line, 9) - - def test_end_column(self): - frame = self._make_one({"endColumn": 14}) - self.assertEqual(frame.end_column, 14) - frame._properties["endColumn"] = "14" - self.assertEqual(frame.end_column, 14) - - def test_text(self): - frame = self._make_one({"text": "QUERY TEXT"}) - self.assertEqual(frame.text, "QUERY TEXT") - - -class TestScriptStatistics(unittest.TestCase, _Base): - def _make_one(self, resource): - from google.cloud.bigquery.job import ScriptStatistics - - return ScriptStatistics(resource) - - def test_evalutation_kind(self): - stats = self._make_one({"evaluationKind": "EXPRESSION"}) - self.assertEqual(stats.evaluation_kind, "EXPRESSION") - self.assertEqual(stats.stack_frames, []) - - def test_stack_frames(self): - stats = self._make_one( - { - "stackFrames": [ - { - "procedureId": "some-procedure", - "startLine": 5, - "startColumn": 29, - "endLine": 9, - "endColumn": 14, - "text": "QUERY TEXT", - }, - {}, - ] - } - ) - stack_frames = stats.stack_frames - self.assertEqual(len(stack_frames), 2) - stack_frame = stack_frames[0] - self.assertEqual(stack_frame.procedure_id, "some-procedure") - self.assertEqual(stack_frame.start_line, 5) - self.assertEqual(stack_frame.start_column, 29) - self.assertEqual(stack_frame.end_line, 9) - self.assertEqual(stack_frame.end_column, 14) - self.assertEqual(stack_frame.text, "QUERY TEXT") - stack_frame = stack_frames[1] - self.assertIsNone(stack_frame.procedure_id) - self.assertIsNone(stack_frame.start_line) - self.assertIsNone(stack_frame.start_column) - self.assertIsNone(stack_frame.end_line) - self.assertIsNone(stack_frame.end_column) - self.assertIsNone(stack_frame.text) - - -class TestTimelineEntry(unittest.TestCase, _Base): - ELAPSED_MS = 101 - ACTIVE_UNITS = 50 - PENDING_UNITS = 98 - COMPLETED_UNITS = 520 - SLOT_MILLIS = 12029 - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.job import TimelineEntry - - return TimelineEntry - - def test_from_api_repr_empty(self): - klass = self._get_target_class() - entry = klass.from_api_repr({}) - self.assertIsNone(entry.elapsed_ms) - self.assertIsNone(entry.active_units) - self.assertIsNone(entry.pending_units) - self.assertIsNone(entry.completed_units) - self.assertIsNone(entry.slot_millis) - - def test_from_api_repr_normal(self): - resource = { - "elapsedMs": self.ELAPSED_MS, - "activeUnits": self.ACTIVE_UNITS, - "pendingUnits": self.PENDING_UNITS, - "completedUnits": self.COMPLETED_UNITS, - "totalSlotMs": self.SLOT_MILLIS, - } - klass = self._get_target_class() - - entry = klass.from_api_repr(resource) - self.assertEqual(entry.elapsed_ms, self.ELAPSED_MS) - self.assertEqual(entry.active_units, self.ACTIVE_UNITS) - self.assertEqual(entry.pending_units, self.PENDING_UNITS) - 
self.assertEqual(entry.completed_units, self.COMPLETED_UNITS) - self.assertEqual(entry.slot_millis, self.SLOT_MILLIS) - - -@pytest.mark.parametrize( - "query,expected", - ( - (None, False), - ("", False), - ("select name, age from table", False), - ("select name, age from table LIMIT 10;", False), - ("select name, age from table order by other_column;", True), - ("Select name, age From table Order By other_column", True), - ("SELECT name, age FROM table ORDER BY other_column;", True), - ("select name, age from table order\nby other_column", True), - ("Select name, age From table Order\nBy other_column;", True), - ("SELECT name, age FROM table ORDER\nBY other_column", True), - ("SelecT name, age froM table OrdeR \n\t BY other_column;", True), - ), -) -def test__contains_order_by(query, expected): - from google.cloud.bigquery import job as mut - - if expected: - assert mut._contains_order_by(query) - else: - assert not mut._contains_order_by(query) - - -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -@pytest.mark.parametrize( - "query", - ( - "select name, age from table order by other_column;", - "Select name, age From table Order By other_column;", - "SELECT name, age FROM table ORDER BY other_column;", - "select name, age from table order\nby other_column;", - "Select name, age From table Order\nBy other_column;", - "SELECT name, age FROM table ORDER\nBY other_column;", - "SelecT name, age froM table OrdeR \n\t BY other_column;", - ), -) -def test_to_dataframe_bqstorage_preserve_order(query): - from google.cloud.bigquery.job import QueryJob as target_class - - job_resource = _make_job_resource( - project_id="test-project", job_type="query", ended=True - ) - job_resource["configuration"]["query"]["query"] = query - job_resource["status"] = {"state": "DONE"} - get_query_results_resource = { - "jobComplete": True, - "jobReference": {"projectId": "test-project", "jobId": "test-job"}, - "schema": { - "fields": [ - {"name": "name", "type": "STRING", "mode": "NULLABLE"}, - {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, - ] - }, - "totalRows": "4", - } - connection = _make_connection(get_query_results_resource, job_resource) - client = _make_client(connection=connection) - job = target_class.from_api_repr(job_resource, client) - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession() - session.avro_schema.schema = json.dumps( - { - "type": "record", - "name": "__root__", - "fields": [ - {"name": "name", "type": ["null", "string"]}, - {"name": "age", "type": ["null", "long"]}, - ], - } - ) - bqstorage_client.create_read_session.return_value = session - - job.to_dataframe(bqstorage_client=bqstorage_client) - - bqstorage_client.create_read_session.assert_called_once_with( - mock.ANY, - "projects/test-project", - format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW, - read_options=mock.ANY, - # Use a single stream to preserve row order. - requested_streams=1, - ) diff --git a/bigquery/tests/unit/test_magics.py b/bigquery/tests/unit/test_magics.py deleted file mode 100644 index 3f66b2c4b765..000000000000 --- a/bigquery/tests/unit/test_magics.py +++ /dev/null @@ -1,1373 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import re
-from concurrent import futures
-
-import mock
-import pytest
-import six
-
-try:
- import pandas
-except ImportError: # pragma: NO COVER
- pandas = None
-try:
- import IPython
- from IPython.utils import io
- from IPython.testing import tools
- from IPython.terminal import interactiveshell
-except ImportError: # pragma: NO COVER
- IPython = None
-
-from google.api_core import exceptions
-import google.auth.credentials
-
-try:
- from google.cloud import bigquery_storage_v1beta1
-except ImportError: # pragma: NO COVER
- bigquery_storage_v1beta1 = None
-from google.cloud import bigquery
-from google.cloud.bigquery import job
-from google.cloud.bigquery import table
-from google.cloud.bigquery import magics
-from tests.unit.helpers import make_connection
-from test_utils.imports import maybe_fail_import
-
-
-pytestmark = pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
-
-
-@pytest.fixture(scope="session")
-def ipython():
- config = tools.default_config()
- config.TerminalInteractiveShell.simple_prompt = True
- shell = interactiveshell.TerminalInteractiveShell.instance(config=config)
- return shell
-
-
-@pytest.fixture()
-def ipython_interactive(request, ipython):
- """Activate IPython's builtin hooks
-
- for the duration of the test scope.
- """
- with ipython.builtin_trap:
- yield ipython
-
-
-@pytest.fixture(scope="session")
-def missing_bq_storage():
- """Provide a patcher that can make the bigquery storage import fail."""
-
- def fail_if(name, globals, locals, fromlist, level):
- # NOTE: *very* simplified, assuming a straightforward absolute import
- return "bigquery_storage_v1beta1" in name or (
- fromlist is not None and "bigquery_storage_v1beta1" in fromlist
- )
-
- return maybe_fail_import(predicate=fail_if)
-
-
-@pytest.fixture(scope="session")
-def missing_grpcio_lib():
- """Provide a patcher that can make the gapic library import fail."""
-
- def fail_if(name, globals, locals, fromlist, level):
- # NOTE: *very* simplified, assuming a straightforward absolute import
- return "gapic_v1" in name or (fromlist is not None and "gapic_v1" in fromlist)
-
- return maybe_fail_import(predicate=fail_if)
-
-
-JOB_REFERENCE_RESOURCE = {"projectId": "its-a-project-eh", "jobId": "some-random-id"}
-TABLE_REFERENCE_RESOURCE = {
- "projectId": "its-a-project-eh",
- "datasetId": "ds",
- "tableId": "persons",
-}
-QUERY_RESOURCE = {
- "jobReference": JOB_REFERENCE_RESOURCE,
- "configuration": {
- "query": {
- "destinationTable": TABLE_REFERENCE_RESOURCE,
- "query": "SELECT 42 FROM `life.the_universe.and_everything`;",
- "queryParameters": [],
- "useLegacySql": False,
- }
- },
- "status": {"state": "DONE"},
-}
-
-
-def test_context_credentials_auto_set_w_application_default_credentials():
- """When Application Default Credentials are set, the context credentials
- are created the first time they are accessed.
- """
- assert magics.context._credentials is None
- assert magics.context._project is None
-
- project = "prahj-ekt"
- credentials_mock = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
- default_patch = 
mock.patch( - "google.auth.default", return_value=(credentials_mock, project) - ) - with default_patch as default_mock: - assert magics.context.credentials is credentials_mock - assert magics.context.project == project - - assert default_mock.call_count == 2 - - -def test_context_credentials_and_project_can_be_set_explicitly(): - project1 = "one-project-55564" - project2 = "other-project-52569" - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, project1) - ) - with default_patch as default_mock: - magics.context.credentials = credentials_mock - magics.context.project = project2 - - assert magics.context.project == project2 - assert magics.context.credentials is credentials_mock - # default should not be called if credentials & project are explicitly set - assert default_mock.call_count == 0 - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_context_connection_can_be_overriden(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - magics.context._credentials = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - project = "project-123" - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, project) - ) - job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) - job_reference["projectId"] = project - - query = "select * from persons" - resource = copy.deepcopy(QUERY_RESOURCE) - resource["jobReference"] = job_reference - resource["configuration"]["query"]["query"] = query - data = {"jobReference": job_reference, "totalRows": 0, "rows": []} - - conn = magics.context._connection = make_connection(resource, data) - list_rows_patch = mock.patch( - "google.cloud.bigquery.client.Client.list_rows", - return_value=google.cloud.bigquery.table._EmptyRowIterator(), - ) - with list_rows_patch as list_rows, default_patch: - ip.run_cell_magic("bigquery", "", query) - - # Check that query actually starts the job. 
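- # The assertions below verify the job insert: a POST to the project's jobs endpoint.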
- list_rows.assert_called() - assert len(conn.api_request.call_args_list) == 2 - _, req = conn.api_request.call_args_list[0] - assert req["method"] == "POST" - assert req["path"] == "/projects/{}/jobs".format(project) - sent = req["data"] - assert isinstance(sent["jobReference"]["jobId"], six.string_types) - sent_config = sent["configuration"]["query"] - assert sent_config["query"] == query - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_context_no_connection(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - magics.context._credentials = None - magics.context._connection = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - project = "project-123" - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, project) - ) - job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) - job_reference["projectId"] = project - - query = "select * from persons" - resource = copy.deepcopy(QUERY_RESOURCE) - resource["jobReference"] = job_reference - resource["configuration"]["query"]["query"] = query - data = {"jobReference": job_reference, "totalRows": 0, "rows": []} - - conn_mock = make_connection(resource, data, data, data) - conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True) - list_rows_patch = mock.patch( - "google.cloud.bigquery.client.Client.list_rows", - return_value=google.cloud.bigquery.table._EmptyRowIterator(), - ) - with conn_patch as conn, list_rows_patch as list_rows, default_patch: - conn.return_value = conn_mock - ip.run_cell_magic("bigquery", "", query) - - # Check that query actually starts the job. 
- list_rows.assert_called() - assert len(conn_mock.api_request.call_args_list) == 2 - _, req = conn_mock.api_request.call_args_list[0] - assert req["method"] == "POST" - assert req["path"] == "/projects/{}/jobs".format(project) - sent = req["data"] - assert isinstance(sent["jobReference"]["jobId"], six.string_types) - sent_config = sent["configuration"]["query"] - assert sent_config["query"] == query - - -def test__run_query(): - magics.context._credentials = None - - job_id = "job_1234" - sql = "SELECT 17" - responses = [ - futures.TimeoutError, - futures.TimeoutError, - [table.Row((17,), {"num": 0})], - ] - - client_patch = mock.patch( - "google.cloud.bigquery.magics.bigquery.Client", autospec=True - ) - with client_patch as client_mock, io.capture_output() as captured: - client_mock().query(sql).result.side_effect = responses - client_mock().query(sql).job_id = job_id - - query_job = magics._run_query(client_mock(), sql) - - lines = re.split("\n|\r", captured.stdout) - # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) - - assert query_job.job_id == job_id - expected_first_line = "Executing query with job ID: {}".format(job_id) - assert updates[0] == expected_first_line - execution_updates = updates[1:-1] - assert len(execution_updates) == 3 # one update per API response - for line in execution_updates: - assert re.match("Query executing: .*s", line) - assert re.match("Query complete after .*s", updates[-1]) - - -def test__run_query_dry_run_without_errors_is_silent(): - magics.context._credentials = None - - sql = "SELECT 17" - - client_patch = mock.patch( - "google.cloud.bigquery.magics.bigquery.Client", autospec=True - ) - - job_config = job.QueryJobConfig() - job_config.dry_run = True - with client_patch as client_mock, io.capture_output() as captured: - client_mock().query(sql).job_id = None - magics._run_query(client_mock(), sql, job_config=job_config) - - assert len(captured.stderr) == 0 - assert len(captured.stdout) == 0 - - -def test__make_bqstorage_client_false(): - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - got = magics._make_bqstorage_client(False, credentials_mock) - assert got is None - - -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -def test__make_bqstorage_client_true(): - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - got = magics._make_bqstorage_client(True, credentials_mock) - assert isinstance(got, bigquery_storage_v1beta1.BigQueryStorageClient) - - -def test__make_bqstorage_client_true_raises_import_error(missing_bq_storage): - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - with pytest.raises(ImportError) as exc_context, missing_bq_storage: - magics._make_bqstorage_client(True, credentials_mock) - - error_msg = str(exc_context.value) - assert "google-cloud-bigquery-storage" in error_msg - assert "pyarrow" in error_msg - - -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test__make_bqstorage_client_true_missing_gapic(missing_grpcio_lib): - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - with pytest.raises(ImportError) as exc_context, missing_grpcio_lib: - 
magics._make_bqstorage_client(True, credentials_mock)
-
- assert "grpcio" in str(exc_context.value)
-
-
-def test__create_dataset_if_necessary_exists():
- project = "project_id"
- dataset_id = "dataset_id"
- dataset_reference = bigquery.dataset.DatasetReference(project, dataset_id)
- dataset = bigquery.Dataset(dataset_reference)
- client_patch = mock.patch(
- "google.cloud.bigquery.magics.bigquery.Client", autospec=True
- )
- with client_patch as client_mock:
- client = client_mock()
- client.project = project
- client.get_dataset.return_value = dataset
- magics._create_dataset_if_necessary(client, dataset_id)
- client.create_dataset.assert_not_called()
-
-
-def test__create_dataset_if_necessary_not_exist():
- project = "project_id"
- dataset_id = "dataset_id"
- client_patch = mock.patch(
- "google.cloud.bigquery.magics.bigquery.Client", autospec=True
- )
- with client_patch as client_mock:
- client = client_mock()
- client.location = "us"
- client.project = project
- client.get_dataset.side_effect = exceptions.NotFound("dataset not found")
- magics._create_dataset_if_necessary(client, dataset_id)
- client.create_dataset.assert_called_once()
-
-
-@pytest.mark.usefixtures("ipython_interactive")
-def test_extension_load():
- ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
-
- # verify that the magic is registered and has the correct source
- magic = ip.magics_manager.magics["cell"].get("bigquery")
- assert magic.__module__ == "google.cloud.bigquery.magics"
-
-
-@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-def test_bigquery_magic_without_optional_arguments(missing_bq_storage):
- ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
-
- sql = "SELECT 17 AS num"
- result = pandas.DataFrame([17], columns=["num"])
- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics._run_query", autospec=True
- )
- query_job_mock = mock.create_autospec(
- google.cloud.bigquery.job.QueryJob, instance=True
- )
- query_job_mock.to_dataframe.return_value = result
-
- # Shouldn't fail when BigQuery Storage client isn't installed. 
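- # (The missing_bq_storage fixture makes any bigquery_storage_v1beta1 import raise ImportError.)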
- with run_query_patch as run_query_mock, missing_bq_storage: - run_query_mock.return_value = query_job_mock - return_value = ip.run_cell_magic("bigquery", "", sql) - - assert isinstance(return_value, pandas.DataFrame) - assert len(return_value) == len(result) # verify row count - assert list(return_value) == list(result) # verify column names - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_default_connection_user_agent(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._connection = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True) - - with conn_patch as conn, run_query_patch, default_patch: - ip.run_cell_magic("bigquery", "", "SELECT 17 as num") - - client_info_arg = conn.call_args.kwargs.get("client_info") - assert client_info_arg is not None - assert client_info_arg.user_agent == "ipython-" + IPython.__version__ - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_with_legacy_sql(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - with run_query_patch as run_query_mock: - ip.run_cell_magic("bigquery", "--use_legacy_sql", "SELECT 17 AS num") - - job_config_used = run_query_mock.call_args_list[0][1]["job_config"] - assert job_config_used.use_legacy_sql is True - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_with_result_saved_to_variable(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - sql = "SELECT 17 AS num" - result = pandas.DataFrame([17], columns=["num"]) - assert "df" not in ip.user_ns - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock: - run_query_mock.return_value = query_job_mock - - return_value = ip.run_cell_magic("bigquery", "df", sql) - - assert return_value is None - assert "df" in ip.user_ns # verify that variable exists - df = ip.user_ns["df"] - assert len(df) == len(result) # verify row count - assert list(df) == list(result) # verify column names - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_does_not_clear_display_in_verbose_mode(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - clear_patch = mock.patch( - "google.cloud.bigquery.magics.display.clear_output", autospec=True - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - with clear_patch as clear_mock, 
run_query_patch: - ip.run_cell_magic("bigquery", "--verbose", "SELECT 17 as num") - - assert clear_mock.call_count == 0 - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_clears_display_in_verbose_mode(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - clear_patch = mock.patch( - "google.cloud.bigquery.magics.display.clear_output", autospec=True - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - with clear_patch as clear_mock, run_query_patch: - ip.run_cell_magic("bigquery", "", "SELECT 17 as num") - - assert clear_mock.call_count == 1 - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - mock_credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - # Set up the context with monkeypatch so that it's reset for subsequent - # tests. - monkeypatch.setattr(magics.context, "credentials", mock_credentials) - monkeypatch.setattr(magics.context, "use_bqstorage_api", False) - - # Mock out the BigQuery Storage API. - bqstorage_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_instance_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient, instance=True - ) - bqstorage_instance_mock.transport = mock.Mock() - bqstorage_mock.return_value = bqstorage_instance_mock - bqstorage_client_patch = mock.patch( - "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", bqstorage_mock - ) - - sql = "SELECT 17 AS num" - result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock, bqstorage_client_patch: - run_query_mock.return_value = query_job_mock - - return_value = ip.run_cell_magic("bigquery", "--use_bqstorage_api", sql) - - assert len(bqstorage_mock.call_args_list) == 1 - kwargs = bqstorage_mock.call_args_list[0].kwargs - assert kwargs.get("credentials") is mock_credentials - client_info = kwargs.get("client_info") - assert client_info is not None - assert client_info.user_agent == "ipython-" + IPython.__version__ - - query_job_mock.to_dataframe.assert_called_once_with( - bqstorage_client=bqstorage_instance_mock - ) - - assert isinstance(return_value, pandas.DataFrame) - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -def test_bigquery_magic_with_bqstorage_from_context(monkeypatch): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - mock_credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - # Set up the context with monkeypatch so that it's reset for subsequent - # tests. 
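- # With use_bqstorage_api set on the context, the magic should create the storage client itself.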
- monkeypatch.setattr(magics.context, "credentials", mock_credentials) - monkeypatch.setattr(magics.context, "use_bqstorage_api", True) - - # Mock out the BigQuery Storage API. - bqstorage_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_instance_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient, instance=True - ) - bqstorage_instance_mock.transport = mock.Mock() - bqstorage_mock.return_value = bqstorage_instance_mock - bqstorage_client_patch = mock.patch( - "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", bqstorage_mock - ) - - sql = "SELECT 17 AS num" - result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock, bqstorage_client_patch: - run_query_mock.return_value = query_job_mock - - return_value = ip.run_cell_magic("bigquery", "", sql) - - assert len(bqstorage_mock.call_args_list) == 1 - kwargs = bqstorage_mock.call_args_list[0].kwargs - assert kwargs.get("credentials") is mock_credentials - client_info = kwargs.get("client_info") - assert client_info is not None - assert client_info.user_agent == "ipython-" + IPython.__version__ - - query_job_mock.to_dataframe.assert_called_once_with( - bqstorage_client=bqstorage_instance_mock - ) - - assert isinstance(return_value, pandas.DataFrame) - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -def test_bigquery_magic_without_bqstorage(monkeypatch): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - mock_credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - # Set up the context with monkeypatch so that it's reset for subsequent - # tests. - monkeypatch.setattr(magics.context, "credentials", mock_credentials) - - # Mock out the BigQuery Storage API. 
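- # The mock exists only to verify that no storage client is created on this code path.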
- bqstorage_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client_patch = mock.patch( - "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", bqstorage_mock - ) - - sql = "SELECT 17 AS num" - result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock, bqstorage_client_patch: - run_query_mock.return_value = query_job_mock - - return_value = ip.run_cell_magic("bigquery", "", sql) - - bqstorage_mock.assert_not_called() - query_job_mock.to_dataframe.assert_called_once_with(bqstorage_client=None) - - assert isinstance(return_value, pandas.DataFrame) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_max_results_invalid(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - client_query_patch = mock.patch( - "google.cloud.bigquery.client.Client.query", autospec=True - ) - - sql = "SELECT 17 AS num" - - with pytest.raises(ValueError), default_patch, client_query_patch: - ip.run_cell_magic("bigquery", "--max_results=abc", sql) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_max_results_valid_calls_queryjob_result(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - client_query_patch = mock.patch( - "google.cloud.bigquery.client.Client.query", autospec=True - ) - - sql = "SELECT 17 AS num" - - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - - with client_query_patch as client_query_mock, default_patch: - client_query_mock.return_value = query_job_mock - ip.run_cell_magic("bigquery", "--max_results=5", sql) - - query_job_mock.result.assert_called_with(max_results=5) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_max_results_query_job_results_fails(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - client_query_patch = mock.patch( - "google.cloud.bigquery.client.Client.query", autospec=True - ) - close_transports_patch = mock.patch( - "google.cloud.bigquery.magics._close_transports", autospec=True, - ) - - sql = "SELECT 17 AS num" - - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.result.side_effect = [[], OSError] - - with pytest.raises( - OSError - ), client_query_patch as client_query_mock, default_patch, close_transports_patch as close_transports: - client_query_mock.return_value = query_job_mock - 
ip.run_cell_magic("bigquery", "--max_results=5", sql) - - assert close_transports.called - - -def test_bigquery_magic_w_table_id_invalid(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - - list_rows_patch = mock.patch( - "google.cloud.bigquery.magics.bigquery.Client.list_rows", - autospec=True, - side_effect=exceptions.BadRequest("Not a valid table ID"), - ) - - table_id = "not-a-real-table" - - with list_rows_patch, default_patch, io.capture_output() as captured_io: - ip.run_cell_magic("bigquery", "df", table_id) - - output = captured_io.stderr - assert "Could not save output to variable" in output - assert "400 Not a valid table ID" in output - assert "Traceback (most recent call last)" not in output - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_w_table_id_and_destination_var(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - - row_iterator_mock = mock.create_autospec( - google.cloud.bigquery.table.RowIterator, instance=True - ) - - client_patch = mock.patch( - "google.cloud.bigquery.magics.bigquery.Client", autospec=True - ) - - table_id = "bigquery-public-data.samples.shakespeare" - result = pandas.DataFrame([17], columns=["num"]) - - with client_patch as client_mock, default_patch: - client_mock().list_rows.return_value = row_iterator_mock - row_iterator_mock.to_dataframe.return_value = result - - ip.run_cell_magic("bigquery", "df", table_id) - - assert "df" in ip.user_ns - df = ip.user_ns["df"] - - assert isinstance(df, pandas.DataFrame) - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_w_table_id_and_bqstorage_client(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - - row_iterator_mock = mock.create_autospec( - google.cloud.bigquery.table.RowIterator, instance=True - ) - - client_patch = mock.patch( - "google.cloud.bigquery.magics.bigquery.Client", autospec=True - ) - - bqstorage_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_instance_mock = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient, instance=True - ) - bqstorage_instance_mock.transport = mock.Mock() - bqstorage_mock.return_value = bqstorage_instance_mock - bqstorage_client_patch = mock.patch( - "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", bqstorage_mock - ) - - table_id = "bigquery-public-data.samples.shakespeare" - - with default_patch, client_patch as client_mock, 
bqstorage_client_patch: - client_mock().list_rows.return_value = row_iterator_mock - - ip.run_cell_magic("bigquery", "--use_bqstorage_api --max_results=5", table_id) - row_iterator_mock.to_dataframe.assert_called_once_with( - bqstorage_client=bqstorage_instance_mock - ) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_dryrun_option_sets_job_config(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - - sql = "SELECT 17 AS num" - - with run_query_patch as run_query_mock: - ip.run_cell_magic("bigquery", "--dry_run", sql) - - job_config_used = run_query_mock.call_args_list[0][1]["job_config"] - assert job_config_used.dry_run is True - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_dryrun_option_returns_query_job(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - - sql = "SELECT 17 AS num" - - with run_query_patch as run_query_mock, io.capture_output() as captured_io: - run_query_mock.return_value = query_job_mock - return_value = ip.run_cell_magic("bigquery", "--dry_run", sql) - - assert "Query validated. This query will process" in captured_io.stdout - assert isinstance(return_value, job.QueryJob) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_dryrun_option_variable_error_message(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", - autospec=True, - side_effect=exceptions.BadRequest("Syntax error in SQL query"), - ) - - sql = "SELECT SELECT 17 AS num" - - assert "q_job" not in ip.user_ns - - with run_query_patch, io.capture_output() as captured: - ip.run_cell_magic("bigquery", "q_job --dry_run", sql) - - full_text = captured.stderr - assert "Could not save output to variable 'q_job'." 
in full_text - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - - sql = "SELECT 17 AS num" - - assert "q_job" not in ip.user_ns - - with run_query_patch as run_query_mock: - run_query_mock.return_value = query_job_mock - return_value = ip.run_cell_magic("bigquery", "q_job --dry_run", sql) - - assert return_value is None - assert "q_job" in ip.user_ns - q_job = ip.user_ns["q_job"] - assert isinstance(q_job, job.QueryJob) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_saves_query_job_to_variable_on_error(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - client_query_patch = mock.patch( - "google.cloud.bigquery.client.Client.query", autospec=True - ) - - query_job = mock.create_autospec(job.QueryJob, instance=True) - exception = Exception("Unexpected SELECT") - exception.query_job = query_job - query_job.result.side_effect = exception - - sql = "SELECT SELECT 17 AS num" - - assert "result" not in ip.user_ns - - with client_query_patch as client_query_mock: - client_query_mock.return_value = query_job - return_value = ip.run_cell_magic("bigquery", "result", sql) - - assert return_value is None - assert "result" in ip.user_ns - result = ip.user_ns["result"] - assert isinstance(result, job.QueryJob) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_maximum_bytes_billed_invalid(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - client_query_patch = mock.patch("google.cloud.bigquery.client.Client.query") - - sql = "SELECT 17 AS num" - - with pytest.raises(ValueError), default_patch, client_query_patch: - ip.run_cell_magic("bigquery", "--maximum_bytes_billed=abc", sql) - - -@pytest.mark.parametrize( - "param_value,expected", [("987654321", "987654321"), ("None", "0")] -) -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_w_maximum_bytes_billed_overrides_context(param_value, expected): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - # Set the default maximum bytes billed, so we know it's overridable by the param. 
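- # A param value of "None" clears the limit, which is serialized as "0" (see parametrize above).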
- magics.context.default_query_job_config.maximum_bytes_billed = 1234567 - - project = "test-project" - job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) - job_reference["projectId"] = project - query = "SELECT 17 AS num" - resource = copy.deepcopy(QUERY_RESOURCE) - resource["jobReference"] = job_reference - resource["configuration"]["query"]["query"] = query - data = {"jobReference": job_reference, "totalRows": 0, "rows": []} - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - conn = magics.context._connection = make_connection(resource, data) - list_rows_patch = mock.patch( - "google.cloud.bigquery.client.Client.list_rows", - return_value=google.cloud.bigquery.table._EmptyRowIterator(), - ) - with list_rows_patch, default_patch: - ip.run_cell_magic( - "bigquery", "--maximum_bytes_billed={}".format(param_value), query - ) - - _, req = conn.api_request.call_args_list[0] - sent_config = req["data"]["configuration"]["query"] - assert sent_config["maximumBytesBilled"] == expected - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_w_maximum_bytes_billed_w_context_inplace(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - magics.context.default_query_job_config.maximum_bytes_billed = 1337 - - project = "test-project" - job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) - job_reference["projectId"] = project - query = "SELECT 17 AS num" - resource = copy.deepcopy(QUERY_RESOURCE) - resource["jobReference"] = job_reference - resource["configuration"]["query"]["query"] = query - data = {"jobReference": job_reference, "totalRows": 0, "rows": []} - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - conn = magics.context._connection = make_connection(resource, data) - list_rows_patch = mock.patch( - "google.cloud.bigquery.client.Client.list_rows", - return_value=google.cloud.bigquery.table._EmptyRowIterator(), - ) - with list_rows_patch, default_patch: - ip.run_cell_magic("bigquery", "", query) - - _, req = conn.api_request.call_args_list[0] - sent_config = req["data"]["configuration"]["query"] - assert sent_config["maximumBytesBilled"] == "1337" - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_w_maximum_bytes_billed_w_context_setter(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - magics.context.default_query_job_config = job.QueryJobConfig( - maximum_bytes_billed=10203 - ) - - project = "test-project" - job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) - job_reference["projectId"] = project - query = "SELECT 17 AS num" - resource = copy.deepcopy(QUERY_RESOURCE) - resource["jobReference"] = job_reference - resource["configuration"]["query"]["query"] = query - data = {"jobReference": job_reference, "totalRows": 0, "rows": []} - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - 
conn = magics.context._connection = make_connection(resource, data) - list_rows_patch = mock.patch( - "google.cloud.bigquery.client.Client.list_rows", - return_value=google.cloud.bigquery.table._EmptyRowIterator(), - ) - with list_rows_patch, default_patch: - ip.run_cell_magic("bigquery", "", query) - - _, req = conn.api_request.call_args_list[0] - sent_config = req["data"]["configuration"]["query"] - assert sent_config["maximumBytesBilled"] == "10203" - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_with_project(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, "general-project") - ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - with run_query_patch as run_query_mock, default_patch: - ip.run_cell_magic("bigquery", "--project=specific-project", "SELECT 17 as num") - - client_used = run_query_mock.call_args_list[0][0][0] - assert client_used.project == "specific-project" - # context project should not change - assert magics.context.project == "general-project" - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_with_string_params(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - sql = "SELECT @num AS num" - result = pandas.DataFrame([17], columns=["num"]) - assert "params_string_df" not in ip.user_ns - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock: - run_query_mock.return_value = query_job_mock - - ip.run_cell_magic("bigquery", 'params_string_df --params {"num":17}', sql) - - run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY) - - assert "params_string_df" in ip.user_ns # verify that the variable exists - df = ip.user_ns["params_string_df"] - assert len(df) == len(result) # verify row count - assert list(df) == list(result) # verify column names - - -@pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -def test_bigquery_magic_with_dict_params(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - sql = "SELECT @num AS num" - result = pandas.DataFrame([17], columns=["num"]) - assert "params_dict_df" not in ip.user_ns - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True - ) - query_job_mock.to_dataframe.return_value = result - with run_query_patch as run_query_mock: - run_query_mock.return_value = query_job_mock - - params = {"num": 17} - # Insert dictionary into user namespace so that it can be expanded - ip.user_ns["params"] = params - ip.run_cell_magic("bigquery", "params_dict_df --params $params", 
sql)
-
- run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY)
-
- assert "params_dict_df" in ip.user_ns # verify that the variable exists
- df = ip.user_ns["params_dict_df"]
- assert len(df) == len(result) # verify row count
- assert list(df) == list(result) # verify column names
-
-
-@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-def test_bigquery_magic_with_improperly_formatted_params():
- ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
-
- sql = "SELECT @num AS num"
-
- with pytest.raises(SyntaxError):
- ip.run_cell_magic("bigquery", "--params {17}", sql)
-
-
-@pytest.mark.usefixtures("ipython_interactive")
-def test_bigquery_magic_omits_tracebacks_from_error_message():
- ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
-
- credentials_mock = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
- default_patch = mock.patch(
- "google.auth.default", return_value=(credentials_mock, "general-project")
- )
-
- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics._run_query",
- autospec=True,
- side_effect=exceptions.BadRequest("Syntax error in SQL query"),
- )
-
- with run_query_patch, default_patch, io.capture_output() as captured_io:
- ip.run_cell_magic("bigquery", "", "SELECT foo FROM WHERE LIMIT bar")
-
- output = captured_io.stderr
- assert "400 Syntax error in SQL query" in output
- assert "Traceback (most recent call last)" not in output
- assert "Syntax error" not in captured_io.stdout
-
-
-@pytest.mark.usefixtures("ipython_interactive")
-def test_bigquery_magic_w_destination_table_invalid_format():
- ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
-
- credentials_mock = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
- default_patch = mock.patch(
- "google.auth.default", return_value=(credentials_mock, "general-project")
- )
-
- client_patch = mock.patch(
- "google.cloud.bigquery.magics.bigquery.Client", autospec=True
- )
-
- with client_patch, default_patch, pytest.raises(ValueError) as exc_context:
- ip.run_cell_magic(
- "bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar"
- )
- error_msg = str(exc_context.value)
- assert (
- "--destination_table should be in a "
- "<dataset_id>.<table_id> format." 
in error_msg - ) - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_destination_table(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - create_dataset_if_necessary_patch = mock.patch( - "google.cloud.bigquery.magics._create_dataset_if_necessary", autospec=True - ) - - run_query_patch = mock.patch( - "google.cloud.bigquery.magics._run_query", autospec=True - ) - - with create_dataset_if_necessary_patch, run_query_patch as run_query_mock: - ip.run_cell_magic( - "bigquery", - "--destination_table dataset_id.table_id", - "SELECT foo FROM WHERE LIMIT bar", - ) - - job_config_used = run_query_mock.call_args_list[0][1]["job_config"] - assert job_config_used.allow_large_results is True - assert job_config_used.create_disposition == "CREATE_IF_NEEDED" - assert job_config_used.write_disposition == "WRITE_TRUNCATE" - assert job_config_used.destination.dataset_id == "dataset_id" - assert job_config_used.destination.table_id == "table_id" - - -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_create_dataset_fails(): - ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - create_dataset_if_necessary_patch = mock.patch( - "google.cloud.bigquery.magics._create_dataset_if_necessary", - autospec=True, - side_effect=OSError, - ) - close_transports_patch = mock.patch( - "google.cloud.bigquery.magics._close_transports", autospec=True, - ) - - with pytest.raises( - OSError - ), create_dataset_if_necessary_patch, close_transports_patch as close_transports: - ip.run_cell_magic( - "bigquery", - "--destination_table dataset_id.table_id", - "SELECT foo FROM WHERE LIMIT bar", - ) - - assert close_transports.called diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py deleted file mode 100644 index a7c639ed1e77..000000000000 --- a/bigquery/tests/unit/test_query.py +++ /dev/null @@ -1,1111 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
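
For reference, the IPython cell magic exercised by the parameter, project, and destination-table tests above is used in a notebook session roughly as follows (a minimal sketch; the destination variable and parameter value are illustrative):

    %load_ext google.cloud.bigquery

    %%bigquery params_df --params {"num": 17}
    SELECT @num AS num

Once the cell finishes, params_df holds the query result as a pandas DataFrame, which is the object the tests above assert on via a mocked query job.
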
- -import datetime -import unittest - -import mock - - -class Test_UDFResource(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import UDFResource - - return UDFResource - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - udf = self._make_one("resourceUri", "gs://some_bucket/some_file") - self.assertEqual(udf.udf_type, "resourceUri") - self.assertEqual(udf.value, "gs://some_bucket/some_file") - - def test___eq__(self): - udf = self._make_one("resourceUri", "gs://some_bucket/some_file") - self.assertEqual(udf, udf) - self.assertNotEqual(udf, object()) - wrong_val = self._make_one("resourceUri", "gs://some_bucket/other_file") - self.assertNotEqual(udf, wrong_val) - wrong_type = self._make_one("inlineCode", udf.value) - self.assertNotEqual(udf, wrong_type) - - -class Test__AbstractQueryParameter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import _AbstractQueryParameter - - return _AbstractQueryParameter - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_from_api_virtual(self): - klass = self._get_target_class() - with self.assertRaises(NotImplementedError): - klass.from_api_repr({}) - - def test_to_api_virtual(self): - param = self._make_one() - with self.assertRaises(NotImplementedError): - param.to_api_repr() - - -class Test_ScalarQueryParameter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import ScalarQueryParameter - - return ScalarQueryParameter - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - param = self._make_one(name="foo", type_="INT64", value=123) - self.assertEqual(param.name, "foo") - self.assertEqual(param.type_, "INT64") - self.assertEqual(param.value, 123) - - def test___eq__(self): - param = self._make_one(name="foo", type_="INT64", value=123) - self.assertEqual(param, param) - self.assertNotEqual(param, object()) - alias = self._make_one(name="bar", type_="INT64", value=123) - self.assertNotEqual(param, alias) - wrong_type = self._make_one(name="foo", type_="FLOAT64", value=123.0) - self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one(name="foo", type_="INT64", value=234) - self.assertNotEqual(param, wrong_val) - - def test_positional(self): - klass = self._get_target_class() - param = klass.positional(type_="INT64", value=123) - self.assertEqual(param.name, None) - self.assertEqual(param.type_, "INT64") - self.assertEqual(param.value, 123) - - def test_from_api_repr_w_name(self): - RESOURCE = { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": 123}, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, "foo") - self.assertEqual(param.type_, "INT64") - self.assertEqual(param.value, 123) - - def test_from_api_repr_wo_name(self): - RESOURCE = { - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, None) - self.assertEqual(param.type_, "INT64") - self.assertEqual(param.value, 123) - - def test_from_api_repr_wo_value(self): - # Back-end may not send back values for None params. 
See #9027 - RESOURCE = {"name": "foo", "parameterType": {"type": "INT64"}} - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, "foo") - self.assertEqual(param.type_, "INT64") - self.assertIs(param.value, None) - - def test_to_api_repr_w_name(self): - EXPECTED = { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - param = self._make_one(name="foo", type_="INT64", value=123) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_wo_name(self): - EXPECTED = { - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - klass = self._get_target_class() - param = klass.positional(type_="INT64", value=123) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_float(self): - EXPECTED = { - "parameterType": {"type": "FLOAT64"}, - "parameterValue": {"value": 12.345}, - } - klass = self._get_target_class() - param = klass.positional(type_="FLOAT64", value=12.345) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_numeric(self): - EXPECTED = { - "parameterType": {"type": "NUMERIC"}, - "parameterValue": {"value": "123456789.123456789"}, - } - klass = self._get_target_class() - param = klass.positional(type_="NUMERIC", value="123456789.123456789") - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_bool(self): - EXPECTED = { - "parameterType": {"type": "BOOL"}, - "parameterValue": {"value": "false"}, - } - klass = self._get_target_class() - param = klass.positional(type_="BOOL", value=False) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_timestamp_datetime(self): - from google.cloud._helpers import UTC - - STAMP = "2016-12-20 15:58:27.339328+00:00" - when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) - EXPECTED = { - "parameterType": {"type": "TIMESTAMP"}, - "parameterValue": {"value": STAMP}, - } - klass = self._get_target_class() - param = klass.positional(type_="TIMESTAMP", value=when) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_timestamp_micros(self): - from google.cloud._helpers import _microseconds_from_datetime - - now = datetime.datetime.utcnow() - seconds = _microseconds_from_datetime(now) / 1.0e6 - EXPECTED = { - "parameterType": {"type": "TIMESTAMP"}, - "parameterValue": {"value": seconds}, - } - klass = self._get_target_class() - param = klass.positional(type_="TIMESTAMP", value=seconds) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_datetime_datetime(self): - from google.cloud._helpers import _datetime_to_rfc3339 - - now = datetime.datetime.utcnow() - EXPECTED = { - "parameterType": {"type": "DATETIME"}, - "parameterValue": { - "value": _datetime_to_rfc3339(now)[:-1] # strip trailing 'Z' - }, - } - klass = self._get_target_class() - param = klass.positional(type_="DATETIME", value=now) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_datetime_string(self): - from google.cloud._helpers import _datetime_to_rfc3339 - - now = datetime.datetime.utcnow() - now_str = _datetime_to_rfc3339(now) - EXPECTED = { - "parameterType": {"type": "DATETIME"}, - "parameterValue": {"value": now_str}, - } - klass = self._get_target_class() - param = klass.positional(type_="DATETIME", value=now_str) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_date_date(self): - today = datetime.date.today() - EXPECTED = { - "parameterType": 
{"type": "DATE"}, - "parameterValue": {"value": today.isoformat()}, - } - klass = self._get_target_class() - param = klass.positional(type_="DATE", value=today) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_date_string(self): - today = datetime.date.today() - today_str = (today.isoformat(),) - EXPECTED = { - "parameterType": {"type": "DATE"}, - "parameterValue": {"value": today_str}, - } - klass = self._get_target_class() - param = klass.positional(type_="DATE", value=today_str) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_unknown_type(self): - EXPECTED = { - "parameterType": {"type": "UNKNOWN"}, - "parameterValue": {"value": "unknown"}, - } - klass = self._get_target_class() - param = klass.positional(type_="UNKNOWN", value="unknown") - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test___eq___wrong_type(self): - field = self._make_one("test", "STRING", "value") - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___eq___name_mismatch(self): - field = self._make_one("test", "STRING", "value") - other = self._make_one("other", "STRING", "value") - self.assertNotEqual(field, other) - - def test___eq___field_type_mismatch(self): - field = self._make_one("test", "STRING", None) - other = self._make_one("test", "INT64", None) - self.assertNotEqual(field, other) - - def test___eq___value_mismatch(self): - field = self._make_one("test", "STRING", "hello") - other = self._make_one("test", "STRING", "world") - self.assertNotEqual(field, other) - - def test___eq___hit(self): - field = self._make_one("test", "STRING", "gotcha") - other = self._make_one("test", "STRING", "gotcha") - self.assertEqual(field, other) - - def test___ne___wrong_type(self): - field = self._make_one("toast", "INT64", 13) - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___ne___same_value(self): - field1 = self._make_one("test", "INT64", 12) - field2 = self._make_one("test", "INT64", 12) - # unittest ``assertEqual`` uses ``==`` not ``!=``. 
- comparison_val = field1 != field2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - field1 = self._make_one("test", "INT64", 11) - field2 = self._make_one("test", "INT64", 12) - self.assertNotEqual(field1, field2) - - def test___repr__(self): - field1 = self._make_one("field1", "STRING", "value") - expected = "ScalarQueryParameter('field1', 'STRING', 'value')" - self.assertEqual(repr(field1), expected) - - -def _make_subparam(name, type_, value): - from google.cloud.bigquery.query import ScalarQueryParameter - - return ScalarQueryParameter(name, type_, value) - - -class Test_ArrayQueryParameter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import ArrayQueryParameter - - return ArrayQueryParameter - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - param = self._make_one(name="foo", array_type="INT64", values=[1, 2]) - self.assertEqual(param.name, "foo") - self.assertEqual(param.array_type, "INT64") - self.assertEqual(param.values, [1, 2]) - - def test___eq__(self): - param = self._make_one(name="foo", array_type="INT64", values=[123]) - self.assertEqual(param, param) - self.assertNotEqual(param, object()) - alias = self._make_one(name="bar", array_type="INT64", values=[123]) - self.assertNotEqual(param, alias) - wrong_type = self._make_one(name="foo", array_type="FLOAT64", values=[123.0]) - self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one(name="foo", array_type="INT64", values=[234]) - self.assertNotEqual(param, wrong_val) - - def test_positional(self): - klass = self._get_target_class() - param = klass.positional(array_type="INT64", values=[1, 2]) - self.assertEqual(param.name, None) - self.assertEqual(param.array_type, "INT64") - self.assertEqual(param.values, [1, 2]) - - def test_from_api_repr_w_name(self): - RESOURCE = { - "name": "foo", - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, "foo") - self.assertEqual(param.array_type, "INT64") - self.assertEqual(param.values, [1, 2]) - - def test_from_api_repr_wo_name(self): - RESOURCE = { - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, None) - self.assertEqual(param.array_type, "INT64") - self.assertEqual(param.values, [1, 2]) - - def test_from_api_repr_wo_values(self): - # Back-end may not send back values for empty array params. 
See #7309 - RESOURCE = { - "name": "foo", - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, "foo") - self.assertEqual(param.array_type, "INT64") - self.assertEqual(param.values, []) - - def test_from_api_repr_w_struct_type(self): - from google.cloud.bigquery.query import StructQueryParameter - - RESOURCE = { - "parameterType": { - "type": "ARRAY", - "arrayType": { - "type": "STRUCT", - "structTypes": [ - {"name": "name", "type": {"type": "STRING"}}, - {"name": "age", "type": {"type": "INT64"}}, - ], - }, - }, - "parameterValue": { - "arrayValues": [ - { - "structValues": { - "name": {"value": "Phred Phlyntstone"}, - "age": {"value": "32"}, - } - }, - { - "structValues": { - "name": {"value": "Bharney Rhubbyl"}, - "age": {"value": "31"}, - } - }, - ] - }, - } - - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - - phred = StructQueryParameter.positional( - _make_subparam("name", "STRING", "Phred Phlyntstone"), - _make_subparam("age", "INT64", 32), - ) - bharney = StructQueryParameter.positional( - _make_subparam("name", "STRING", "Bharney Rhubbyl"), - _make_subparam("age", "INT64", 31), - ) - self.assertEqual(param.array_type, "STRUCT") - self.assertEqual(param.values, [phred, bharney]) - - def test_to_api_repr_w_name(self): - EXPECTED = { - "name": "foo", - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, - } - param = self._make_one(name="foo", array_type="INT64", values=[1, 2]) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_wo_name(self): - EXPECTED = { - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, - } - klass = self._get_target_class() - param = klass.positional(array_type="INT64", values=[1, 2]) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_unknown_type(self): - EXPECTED = { - "parameterType": {"type": "ARRAY", "arrayType": {"type": "UNKNOWN"}}, - "parameterValue": {"arrayValues": [{"value": "unknown"}]}, - } - klass = self._get_target_class() - param = klass.positional(array_type="UNKNOWN", values=["unknown"]) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_record_type(self): - from google.cloud.bigquery.query import StructQueryParameter - - EXPECTED = { - "parameterType": { - "type": "ARRAY", - "arrayType": { - "type": "STRUCT", - "structTypes": [ - {"name": "foo", "type": {"type": "STRING"}}, - {"name": "bar", "type": {"type": "INT64"}}, - ], - }, - }, - "parameterValue": { - "arrayValues": [ - {"structValues": {"foo": {"value": "Foo"}, "bar": {"value": "123"}}} - ] - }, - } - one = _make_subparam("foo", "STRING", "Foo") - another = _make_subparam("bar", "INT64", 123) - struct = StructQueryParameter.positional(one, another) - klass = self._get_target_class() - param = klass.positional(array_type="RECORD", values=[struct]) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test___eq___wrong_type(self): - field = self._make_one("test", "STRING", ["value"]) - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___eq___name_mismatch(self): - field = self._make_one("field", "STRING", ["value"]) - other = self._make_one("other", "STRING", ["value"]) - self.assertNotEqual(field, other) - - def 
test___eq___field_type_mismatch(self): - field = self._make_one("test", "STRING", []) - other = self._make_one("test", "INT64", []) - self.assertNotEqual(field, other) - - def test___eq___value_mismatch(self): - field = self._make_one("test", "STRING", ["hello"]) - other = self._make_one("test", "STRING", ["hello", "world"]) - self.assertNotEqual(field, other) - - def test___eq___hit(self): - field = self._make_one("test", "STRING", ["gotcha"]) - other = self._make_one("test", "STRING", ["gotcha"]) - self.assertEqual(field, other) - - def test___ne___wrong_type(self): - field = self._make_one("toast", "INT64", [13]) - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___ne___same_value(self): - field1 = self._make_one("test", "INT64", [12]) - field2 = self._make_one("test", "INT64", [12]) - # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = field1 != field2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - field1 = self._make_one("test", "INT64", [11]) - field2 = self._make_one("test", "INT64", [12]) - self.assertNotEqual(field1, field2) - - def test___repr__(self): - field1 = self._make_one("field1", "STRING", ["value"]) - expected = "ArrayQueryParameter('field1', 'STRING', ['value'])" - self.assertEqual(repr(field1), expected) - - -class Test_StructQueryParameter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import StructQueryParameter - - return StructQueryParameter - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - sub_1 = _make_subparam("bar", "INT64", 123) - sub_2 = _make_subparam("baz", "STRING", "abc") - param = self._make_one("foo", sub_1, sub_2) - self.assertEqual(param.name, "foo") - self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) - self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) - - def test___eq__(self): - sub_1 = _make_subparam("bar", "INT64", 123) - sub_2 = _make_subparam("baz", "STRING", "abc") - sub_3 = _make_subparam("baz", "STRING", "def") - sub_1_float = _make_subparam("bar", "FLOAT64", 123.0) - param = self._make_one("foo", sub_1, sub_2) - self.assertEqual(param, param) - self.assertNotEqual(param, object()) - alias = self._make_one("bar", sub_1, sub_2) - self.assertNotEqual(param, alias) - wrong_type = self._make_one("foo", sub_1_float, sub_2) - self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one("foo", sub_2, sub_3) - self.assertNotEqual(param, wrong_val) - - def test_positional(self): - sub_1 = _make_subparam("bar", "INT64", 123) - sub_2 = _make_subparam("baz", "STRING", "abc") - klass = self._get_target_class() - param = klass.positional(sub_1, sub_2) - self.assertEqual(param.name, None) - self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) - self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) - - def test_from_api_repr_w_name(self): - RESOURCE = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "INT64"}}, - {"name": "baz", "type": {"type": "STRING"}}, - ], - }, - "parameterValue": { - "structValues": {"bar": {"value": 123}, "baz": {"value": "abc"}} - }, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, "foo") - self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) - self.assertEqual(param.struct_values, {"bar": 123, "baz": 
"abc"}) - - def test_from_api_repr_wo_name(self): - RESOURCE = { - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "INT64"}}, - {"name": "baz", "type": {"type": "STRING"}}, - ], - }, - "parameterValue": { - "structValues": {"bar": {"value": 123}, "baz": {"value": "abc"}} - }, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, None) - self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) - self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) - - def test_from_api_repr_w_nested_array(self): - from google.cloud.bigquery.query import ArrayQueryParameter - - RESOURCE = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "STRING"}}, - { - "name": "baz", - "type": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - }, - ], - }, - "parameterValue": { - "structValues": { - "bar": {"value": "abc"}, - "baz": {"arrayValues": [{"value": "123"}, {"value": "456"}]}, - } - }, - } - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - self.assertEqual( - param, - self._make_one( - "foo", - _make_subparam("bar", "STRING", "abc"), - ArrayQueryParameter("baz", "INT64", [123, 456]), - ), - ) - - def test_from_api_repr_w_nested_struct(self): - RESOURCE = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "STRING"}}, - { - "name": "baz", - "type": { - "type": "STRUCT", - "structTypes": [ - {"name": "qux", "type": {"type": "INT64"}}, - {"name": "spam", "type": {"type": "BOOL"}}, - ], - }, - }, - ], - }, - "parameterValue": { - "structValues": { - "bar": {"value": "abc"}, - "baz": { - "structValues": { - "qux": {"value": "123"}, - "spam": {"value": "true"}, - } - }, - } - }, - } - - klass = self._get_target_class() - param = klass.from_api_repr(RESOURCE) - - expected = self._make_one( - "foo", - _make_subparam("bar", "STRING", "abc"), - self._make_one( - "baz", - _make_subparam("qux", "INT64", 123), - _make_subparam("spam", "BOOL", True), - ), - ) - self.assertEqual(param.name, "foo") - self.assertEqual(param.struct_types, expected.struct_types) - self.assertEqual(param.struct_values, expected.struct_values) - - def test_to_api_repr_w_name(self): - EXPECTED = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "INT64"}}, - {"name": "baz", "type": {"type": "STRING"}}, - ], - }, - "parameterValue": { - "structValues": {"bar": {"value": "123"}, "baz": {"value": "abc"}} - }, - } - sub_1 = _make_subparam("bar", "INT64", 123) - sub_2 = _make_subparam("baz", "STRING", "abc") - param = self._make_one("foo", sub_1, sub_2) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_wo_name(self): - EXPECTED = { - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "INT64"}}, - {"name": "baz", "type": {"type": "STRING"}}, - ], - }, - "parameterValue": { - "structValues": {"bar": {"value": "123"}, "baz": {"value": "abc"}} - }, - } - sub_1 = _make_subparam("bar", "INT64", 123) - sub_2 = _make_subparam("baz", "STRING", "abc") - klass = self._get_target_class() - param = klass.positional(sub_1, sub_2) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_nested_array(self): - from google.cloud.bigquery.query import ArrayQueryParameter - - EXPECTED = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - 
"structTypes": [ - {"name": "bar", "type": {"type": "STRING"}}, - { - "name": "baz", - "type": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - }, - ], - }, - "parameterValue": { - "structValues": { - "bar": {"value": "abc"}, - "baz": {"arrayValues": [{"value": "123"}, {"value": "456"}]}, - } - }, - } - scalar = _make_subparam("bar", "STRING", "abc") - array = ArrayQueryParameter("baz", "INT64", [123, 456]) - param = self._make_one("foo", scalar, array) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test_to_api_repr_w_nested_struct(self): - EXPECTED = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "bar", "type": {"type": "STRING"}}, - { - "name": "baz", - "type": { - "type": "STRUCT", - "structTypes": [ - {"name": "qux", "type": {"type": "INT64"}}, - {"name": "spam", "type": {"type": "BOOL"}}, - ], - }, - }, - ], - }, - "parameterValue": { - "structValues": { - "bar": {"value": "abc"}, - "baz": { - "structValues": { - "qux": {"value": "123"}, - "spam": {"value": "true"}, - } - }, - } - }, - } - scalar_1 = _make_subparam("bar", "STRING", "abc") - scalar_2 = _make_subparam("qux", "INT64", 123) - scalar_3 = _make_subparam("spam", "BOOL", True) - sub = self._make_one("baz", scalar_2, scalar_3) - param = self._make_one("foo", scalar_1, sub) - self.assertEqual(param.to_api_repr(), EXPECTED) - - def test___eq___wrong_type(self): - field = self._make_one("test", _make_subparam("bar", "STRING", "abc")) - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___eq___name_mismatch(self): - field = self._make_one("test", _make_subparam("bar", "STRING", "abc")) - other = self._make_one("other ", _make_subparam("bar", "STRING", "abc")) - self.assertNotEqual(field, other) - - def test___eq___field_type_mismatch(self): - field = self._make_one("test", _make_subparam("bar", "STRING", None)) - other = self._make_one("test", _make_subparam("bar", "INT64", None)) - self.assertNotEqual(field, other) - - def test___eq___value_mismatch(self): - field = self._make_one("test", _make_subparam("bar", "STRING", "hello")) - other = self._make_one("test", _make_subparam("bar", "STRING", "world")) - self.assertNotEqual(field, other) - - def test___eq___hit(self): - field = self._make_one("test", _make_subparam("bar", "STRING", "gotcha")) - other = self._make_one("test", _make_subparam("bar", "STRING", "gotcha")) - self.assertEqual(field, other) - - def test___ne___wrong_type(self): - field = self._make_one("test", _make_subparam("bar", "STRING", "hello")) - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___ne___same_value(self): - field1 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) - field2 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) - # unittest ``assertEqual`` uses ``==`` not ``!=``. 
- comparison_val = field1 != field2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - field1 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) - field2 = self._make_one("test", _make_subparam("bar", "STRING", "world")) - self.assertNotEqual(field1, field2) - - def test___repr__(self): - field1 = self._make_one("test", _make_subparam("field1", "STRING", "hello")) - got = repr(field1) - self.assertIn("StructQueryParameter", got) - self.assertIn("'field1', 'STRING'", got) - self.assertIn("'field1': 'hello'", got) - - -class Test_QueryResults(unittest.TestCase): - PROJECT = "project" - JOB_ID = "test-synchronous-query" - TOKEN = "TOKEN" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.query import _QueryResults - - return _QueryResults - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _make_resource(self): - return {"jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}} - - def _verifySchema(self, query, resource): - from google.cloud.bigquery.schema import SchemaField - - if "schema" in resource: - fields = resource["schema"]["fields"] - self.assertEqual(len(query.schema), len(fields)) - for found, expected in zip(query.schema, fields): - self.assertIsInstance(found, SchemaField) - self.assertEqual(found.name, expected["name"]) - self.assertEqual(found.field_type, expected["type"]) - self.assertEqual(found.mode, expected["mode"]) - self.assertEqual(found.description, expected.get("description")) - self.assertEqual(found.fields, expected.get("fields", ())) - else: - self.assertEqual(query.schema, ()) - - def test_ctor_defaults(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.cache_hit) - self.assertIsNone(query.complete) - self.assertIsNone(query.errors) - self.assertIsNone(query.page_token) - self.assertEqual(query.project, self.PROJECT) - self.assertEqual(query.rows, []) - self.assertEqual(query.schema, ()) - self.assertIsNone(query.total_rows) - self.assertIsNone(query.total_bytes_processed) - - def test_cache_hit_missing(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.cache_hit) - - def test_cache_hit_present(self): - resource = self._make_resource() - resource["cacheHit"] = True - query = self._make_one(resource) - self.assertTrue(query.cache_hit) - - def test_complete_missing(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.complete) - - def test_complete_present(self): - resource = self._make_resource() - resource["jobComplete"] = True - query = self._make_one(resource) - self.assertTrue(query.complete) - - def test_errors_missing(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.errors) - - def test_errors_present(self): - ERRORS = [{"reason": "testing"}] - resource = self._make_resource() - resource["errors"] = ERRORS - query = self._make_one(resource) - self.assertEqual(query.errors, ERRORS) - - def test_job_id_missing(self): - with self.assertRaises(ValueError): - self._make_one({}) - - def test_job_id_broken_job_reference(self): - resource = {"jobReference": {"bogus": "BOGUS"}} - with self.assertRaises(ValueError): - self._make_one(resource) - - def test_job_id_present(self): - resource = self._make_resource() - resource["jobReference"]["jobId"] = "custom-job" - query = self._make_one(resource) - self.assertEqual(query.job_id, "custom-job") - - def test_page_token_missing(self): - query = self._make_one(self._make_resource()) - 
self.assertIsNone(query.page_token) - - def test_page_token_present(self): - resource = self._make_resource() - resource["pageToken"] = "TOKEN" - query = self._make_one(resource) - self.assertEqual(query.page_token, "TOKEN") - - def test_total_rows_present_integer(self): - resource = self._make_resource() - resource["totalRows"] = 42 - query = self._make_one(resource) - self.assertEqual(query.total_rows, 42) - - def test_total_rows_present_string(self): - resource = self._make_resource() - resource["totalRows"] = "42" - query = self._make_one(resource) - self.assertEqual(query.total_rows, 42) - - def test_total_bytes_processed_missing(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.total_bytes_processed) - - def test_total_bytes_processed_present_integer(self): - resource = self._make_resource() - resource["totalBytesProcessed"] = 123456 - query = self._make_one(resource) - self.assertEqual(query.total_bytes_processed, 123456) - - def test_total_bytes_processed_present_string(self): - resource = self._make_resource() - resource["totalBytesProcessed"] = "123456" - query = self._make_one(resource) - self.assertEqual(query.total_bytes_processed, 123456) - - def test_num_dml_affected_rows_missing(self): - query = self._make_one(self._make_resource()) - self.assertIsNone(query.num_dml_affected_rows) - - def test_num_dml_affected_rows_present_integer(self): - resource = self._make_resource() - resource["numDmlAffectedRows"] = 123456 - query = self._make_one(resource) - self.assertEqual(query.num_dml_affected_rows, 123456) - - def test_num_dml_affected_rows_present_string(self): - resource = self._make_resource() - resource["numDmlAffectedRows"] = "123456" - query = self._make_one(resource) - self.assertEqual(query.num_dml_affected_rows, 123456) - - def test_schema(self): - query = self._make_one(self._make_resource()) - self._verifySchema(query, self._make_resource()) - resource = self._make_resource() - resource["schema"] = { - "fields": [ - {"name": "full_name", "type": "STRING", "mode": "REQURED"}, - {"name": "age", "type": "INTEGER", "mode": "REQURED"}, - ] - } - query._set_properties(resource) - self._verifySchema(query, resource) - - -class Test__query_param_from_api_repr(unittest.TestCase): - @staticmethod - def _call_fut(resource): - from google.cloud.bigquery.query import _query_param_from_api_repr - - return _query_param_from_api_repr(resource) - - def test_w_scalar(self): - from google.cloud.bigquery.query import ScalarQueryParameter - - RESOURCE = { - "name": "foo", - "parameterType": {"type": "INT64"}, - "parameterValue": {"value": "123"}, - } - - parameter = self._call_fut(RESOURCE) - - self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, "foo") - self.assertEqual(parameter.type_, "INT64") - self.assertEqual(parameter.value, 123) - - def test_w_scalar_timestamp(self): - from google.cloud._helpers import UTC - from google.cloud.bigquery.query import ScalarQueryParameter - - RESOURCE = { - "name": "zoned", - "parameterType": {"type": "TIMESTAMP"}, - "parameterValue": {"value": "2012-03-04 05:06:07+00:00"}, - } - - parameter = self._call_fut(RESOURCE) - - self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, "zoned") - self.assertEqual(parameter.type_, "TIMESTAMP") - self.assertEqual( - parameter.value, datetime.datetime(2012, 3, 4, 5, 6, 7, tzinfo=UTC) - ) - - def test_w_scalar_timestamp_micros(self): - from google.cloud._helpers import UTC - from google.cloud.bigquery.query 
import ScalarQueryParameter - - RESOURCE = { - "name": "zoned", - "parameterType": {"type": "TIMESTAMP"}, - "parameterValue": {"value": "2012-03-04 05:06:07.250000+00:00"}, - } - - parameter = self._call_fut(RESOURCE) - - self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, "zoned") - self.assertEqual(parameter.type_, "TIMESTAMP") - self.assertEqual( - parameter.value, datetime.datetime(2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC) - ) - - def test_w_array(self): - from google.cloud.bigquery.query import ArrayQueryParameter - - RESOURCE = { - "name": "foo", - "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, - "parameterValue": {"arrayValues": [{"value": "123"}]}, - } - - parameter = self._call_fut(RESOURCE) - - self.assertIsInstance(parameter, ArrayQueryParameter) - self.assertEqual(parameter.name, "foo") - self.assertEqual(parameter.array_type, "INT64") - self.assertEqual(parameter.values, [123]) - - def test_w_struct(self): - from google.cloud.bigquery.query import StructQueryParameter - - RESOURCE = { - "name": "foo", - "parameterType": { - "type": "STRUCT", - "structTypes": [ - {"name": "foo", "type": {"type": "STRING"}}, - {"name": "bar", "type": {"type": "INT64"}}, - ], - }, - "parameterValue": { - "structValues": {"foo": {"value": "Foo"}, "bar": {"value": "123"}} - }, - } - - parameter = self._call_fut(RESOURCE) - - self.assertIsInstance(parameter, StructQueryParameter) - self.assertEqual(parameter.name, "foo") - self.assertEqual(parameter.struct_types, {"foo": "STRING", "bar": "INT64"}) - self.assertEqual(parameter.struct_values, {"foo": "Foo", "bar": 123}) diff --git a/bigquery/tests/unit/test_retry.py b/bigquery/tests/unit/test_retry.py deleted file mode 100644 index d9f867cb30f7..000000000000 --- a/bigquery/tests/unit/test_retry.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
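
The parameter classes covered by the deleted test_query.py above are consumed through QueryJobConfig when running a real query. A minimal sketch, assuming default credentials and project (names and values are illustrative):

    from google.cloud import bigquery

    client = bigquery.Client()
    job_config = bigquery.QueryJobConfig()
    job_config.query_parameters = [
        bigquery.ScalarQueryParameter("name", "STRING", "Phred Phlyntstone"),
        bigquery.ArrayQueryParameter("ages", "INT64", [32, 31]),
    ]
    query_job = client.query(
        "SELECT @name AS name, ARRAY_LENGTH(@ages) AS n_ages",
        job_config=job_config,
    )
    for row in query_job:  # waits for the job to finish, then fetches rows
        print(row.name, row.n_ages)
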
-
-import unittest
-
-import mock
-
-
-class Test_should_retry(unittest.TestCase):
-    def _call_fut(self, exc):
-        from google.cloud.bigquery.retry import _should_retry
-
-        return _should_retry(exc)
-
-    def test_wo_errors_attribute(self):
-        self.assertFalse(self._call_fut(object()))
-
-    def test_w_empty_errors(self):
-        exc = mock.Mock(errors=[], spec=["errors"])
-        self.assertFalse(self._call_fut(exc))
-
-    def test_w_non_matching_reason(self):
-        exc = mock.Mock(errors=[{"reason": "bogus"}], spec=["errors"])
-        self.assertFalse(self._call_fut(exc))
-
-    def test_w_backendError(self):
-        exc = mock.Mock(errors=[{"reason": "backendError"}], spec=["errors"])
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_rateLimitExceeded(self):
-        exc = mock.Mock(errors=[{"reason": "rateLimitExceeded"}], spec=["errors"])
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_unstructured_too_many_requests(self):
-        from google.api_core.exceptions import TooManyRequests
-
-        exc = TooManyRequests("testing")
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_internalError(self):
-        exc = mock.Mock(errors=[{"reason": "internalError"}], spec=["errors"])
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_unstructured_internal_server_error(self):
-        from google.api_core.exceptions import InternalServerError
-
-        exc = InternalServerError("testing")
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_badGateway(self):
-        exc = mock.Mock(errors=[{"reason": "badGateway"}], spec=["errors"])
-        self.assertTrue(self._call_fut(exc))
-
-    def test_w_unstructured_bad_gateway(self):
-        from google.api_core.exceptions import BadGateway
-
-        exc = BadGateway("testing")
-        self.assertTrue(self._call_fut(exc))
diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py
deleted file mode 100644
index e1bdd7b2fb73..000000000000
--- a/bigquery/tests/unit/test_schema.py
+++ /dev/null
@@ -1,634 +0,0 @@
-# Copyright 2015 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
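
The _should_retry predicate tested in the deleted test_retry.py above backs the library's DEFAULT_RETRY object, which callers can tighten per request. A minimal sketch (the table ID is a placeholder):

    from google.cloud import bigquery
    from google.cloud.bigquery.retry import DEFAULT_RETRY

    client = bigquery.Client()
    # Transient backendError / rateLimitExceeded / 5xx responses are retried,
    # but here only for up to 30 seconds overall.
    table = client.get_table(
        "my-project.my_dataset.my_table",
        retry=DEFAULT_RETRY.with_deadline(30),
    )
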
- -import unittest - -import mock - - -class TestSchemaField(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.schema import SchemaField - - return SchemaField - - @staticmethod - def _get_standard_sql_data_type_class(): - from google.cloud.bigquery_v2 import types - - return types.StandardSqlDataType - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_defaults(self): - field = self._make_one("test", "STRING") - self.assertEqual(field._name, "test") - self.assertEqual(field._field_type, "STRING") - self.assertEqual(field._mode, "NULLABLE") - self.assertIsNone(field._description) - self.assertEqual(field._fields, ()) - - def test_constructor_explicit(self): - field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") - self.assertEqual(field._name, "test") - self.assertEqual(field._field_type, "STRING") - self.assertEqual(field._mode, "REQUIRED") - self.assertEqual(field._description, "Testing") - self.assertEqual(field._fields, ()) - - def test_constructor_subfields(self): - sub_field1 = self._make_one("area_code", "STRING") - sub_field2 = self._make_one("local_number", "STRING") - field = self._make_one( - "phone_number", "RECORD", fields=[sub_field1, sub_field2] - ) - self.assertEqual(field._name, "phone_number") - self.assertEqual(field._field_type, "RECORD") - self.assertEqual(field._mode, "NULLABLE") - self.assertIsNone(field._description) - self.assertEqual(len(field._fields), 2) - self.assertIs(field._fields[0], sub_field1) - self.assertIs(field._fields[1], sub_field2) - - def test_to_api_repr(self): - field = self._make_one("foo", "INTEGER", "NULLABLE") - self.assertEqual( - field.to_api_repr(), - {"mode": "NULLABLE", "name": "foo", "type": "INTEGER", "description": None}, - ) - - def test_to_api_repr_with_subfield(self): - for record_type in ("RECORD", "STRUCT"): - subfield = self._make_one("bar", "INTEGER", "NULLABLE") - field = self._make_one("foo", record_type, "REQUIRED", fields=(subfield,)) - self.assertEqual( - field.to_api_repr(), - { - "fields": [ - { - "mode": "NULLABLE", - "name": "bar", - "type": "INTEGER", - "description": None, - } - ], - "mode": "REQUIRED", - "name": "foo", - "type": record_type, - "description": None, - }, - ) - - def test_from_api_repr(self): - field = self._get_target_class().from_api_repr( - { - "fields": [{"mode": "nullable", "name": "bar", "type": "integer"}], - "mode": "required", - "description": "test_description", - "name": "foo", - "type": "record", - } - ) - self.assertEqual(field.name, "foo") - self.assertEqual(field.field_type, "RECORD") - self.assertEqual(field.mode, "REQUIRED") - self.assertEqual(field.description, "test_description") - self.assertEqual(len(field.fields), 1) - self.assertEqual(field.fields[0].name, "bar") - self.assertEqual(field.fields[0].field_type, "INTEGER") - self.assertEqual(field.fields[0].mode, "NULLABLE") - - def test_from_api_repr_defaults(self): - field = self._get_target_class().from_api_repr( - {"name": "foo", "type": "record"} - ) - self.assertEqual(field.name, "foo") - self.assertEqual(field.field_type, "RECORD") - self.assertEqual(field.mode, "NULLABLE") - self.assertEqual(field.description, None) - self.assertEqual(len(field.fields), 0) - - def test_name_property(self): - name = "lemon-ness" - schema_field = self._make_one(name, "INTEGER") - self.assertIs(schema_field.name, name) - - def test_field_type_property(self): - field_type = "BOOLEAN" - schema_field = 
self._make_one("whether", field_type) - self.assertIs(schema_field.field_type, field_type) - - def test_mode_property(self): - mode = "REPEATED" - schema_field = self._make_one("again", "FLOAT", mode=mode) - self.assertIs(schema_field.mode, mode) - - def test_is_nullable(self): - mode = "NULLABLE" - schema_field = self._make_one("test", "FLOAT", mode=mode) - self.assertTrue(schema_field.is_nullable) - - def test_is_not_nullable(self): - mode = "REPEATED" - schema_field = self._make_one("test", "FLOAT", mode=mode) - self.assertFalse(schema_field.is_nullable) - - def test_description_property(self): - description = "It holds some data." - schema_field = self._make_one("do", "TIMESTAMP", description=description) - self.assertIs(schema_field.description, description) - - def test_fields_property(self): - sub_field1 = self._make_one("one", "STRING") - sub_field2 = self._make_one("fish", "INTEGER") - fields = (sub_field1, sub_field2) - schema_field = self._make_one("boat", "RECORD", fields=fields) - self.assertIs(schema_field.fields, fields) - - def test_to_standard_sql_simple_type(self): - sql_type = self._get_standard_sql_data_type_class() - examples = ( - # a few legacy types - ("INTEGER", sql_type.INT64), - ("FLOAT", sql_type.FLOAT64), - ("BOOLEAN", sql_type.BOOL), - ("DATETIME", sql_type.DATETIME), - # a few standard types - ("INT64", sql_type.INT64), - ("FLOAT64", sql_type.FLOAT64), - ("BOOL", sql_type.BOOL), - ("GEOGRAPHY", sql_type.GEOGRAPHY), - ) - for legacy_type, standard_type in examples: - field = self._make_one("some_field", legacy_type) - standard_field = field.to_standard_sql() - self.assertEqual(standard_field.name, "some_field") - self.assertEqual(standard_field.type.type_kind, standard_type) - - def test_to_standard_sql_struct_type(self): - from google.cloud.bigquery_v2 import types - - # Expected result object: - # - # name: "image_usage" - # type { - # type_kind: STRUCT - # struct_type { - # fields { - # name: "image_content" - # type {type_kind: BYTES} - # } - # fields { - # name: "last_used" - # type { - # type_kind: STRUCT - # struct_type { - # fields { - # name: "date_field" - # type {type_kind: DATE} - # } - # fields { - # name: "time_field" - # type {type_kind: TIME} - # } - # } - # } - # } - # } - # } - - sql_type = self._get_standard_sql_data_type_class() - - # level 2 fields - sub_sub_field_date = types.StandardSqlField( - name="date_field", type=sql_type(type_kind=sql_type.DATE) - ) - sub_sub_field_time = types.StandardSqlField( - name="time_field", type=sql_type(type_kind=sql_type.TIME) - ) - - # level 1 fields - sub_field_struct = types.StandardSqlField( - name="last_used", type=sql_type(type_kind=sql_type.STRUCT) - ) - sub_field_struct.type.struct_type.fields.extend( - [sub_sub_field_date, sub_sub_field_time] - ) - sub_field_bytes = types.StandardSqlField( - name="image_content", type=sql_type(type_kind=sql_type.BYTES) - ) - - # level 0 (top level) - expected_result = types.StandardSqlField( - name="image_usage", type=sql_type(type_kind=sql_type.STRUCT) - ) - expected_result.type.struct_type.fields.extend( - [sub_field_bytes, sub_field_struct] - ) - - # construct legacy SchemaField object - sub_sub_field1 = self._make_one("date_field", "DATE") - sub_sub_field2 = self._make_one("time_field", "TIME") - sub_field_record = self._make_one( - "last_used", "RECORD", fields=(sub_sub_field1, sub_sub_field2) - ) - sub_field_bytes = self._make_one("image_content", "BYTES") - - for type_name in ("RECORD", "STRUCT"): - schema_field = self._make_one( - "image_usage", 
type_name, fields=(sub_field_bytes, sub_field_record) - ) - standard_field = schema_field.to_standard_sql() - self.assertEqual(standard_field, expected_result) - - def test_to_standard_sql_array_type_simple(self): - from google.cloud.bigquery_v2 import types - - sql_type = self._get_standard_sql_data_type_class() - - # construct expected result object - expected_sql_type = sql_type(type_kind=sql_type.ARRAY) - expected_sql_type.array_element_type.type_kind = sql_type.INT64 - expected_result = types.StandardSqlField( - name="valid_numbers", type=expected_sql_type - ) - - # construct "repeated" SchemaField object and convert to standard SQL - schema_field = self._make_one("valid_numbers", "INT64", mode="REPEATED") - standard_field = schema_field.to_standard_sql() - - self.assertEqual(standard_field, expected_result) - - def test_to_standard_sql_array_type_struct(self): - from google.cloud.bigquery_v2 import types - - sql_type = self._get_standard_sql_data_type_class() - - # define person STRUCT - name_field = types.StandardSqlField( - name="name", type=sql_type(type_kind=sql_type.STRING) - ) - age_field = types.StandardSqlField( - name="age", type=sql_type(type_kind=sql_type.INT64) - ) - person_struct = types.StandardSqlField( - name="person_info", type=sql_type(type_kind=sql_type.STRUCT) - ) - person_struct.type.struct_type.fields.extend([name_field, age_field]) - - # define expected result - an ARRAY of person structs - expected_sql_type = sql_type( - type_kind=sql_type.ARRAY, array_element_type=person_struct.type - ) - expected_result = types.StandardSqlField( - name="known_people", type=expected_sql_type - ) - - # construct legacy repeated SchemaField object - sub_field1 = self._make_one("name", "STRING") - sub_field2 = self._make_one("age", "INTEGER") - schema_field = self._make_one( - "known_people", "RECORD", fields=(sub_field1, sub_field2), mode="REPEATED" - ) - - standard_field = schema_field.to_standard_sql() - self.assertEqual(standard_field, expected_result) - - def test_to_standard_sql_unknown_type(self): - sql_type = self._get_standard_sql_data_type_class() - field = self._make_one("weird_field", "TROOLEAN") - - standard_field = field.to_standard_sql() - - self.assertEqual(standard_field.name, "weird_field") - self.assertEqual(standard_field.type.type_kind, sql_type.TYPE_KIND_UNSPECIFIED) - - def test___eq___wrong_type(self): - field = self._make_one("test", "STRING") - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___eq___name_mismatch(self): - field = self._make_one("test", "STRING") - other = self._make_one("other", "STRING") - self.assertNotEqual(field, other) - - def test___eq___field_type_mismatch(self): - field = self._make_one("test", "STRING") - other = self._make_one("test", "INTEGER") - self.assertNotEqual(field, other) - - def test___eq___mode_mismatch(self): - field = self._make_one("test", "STRING", mode="REQUIRED") - other = self._make_one("test", "STRING", mode="NULLABLE") - self.assertNotEqual(field, other) - - def test___eq___description_mismatch(self): - field = self._make_one("test", "STRING", description="Testing") - other = self._make_one("test", "STRING", description="Other") - self.assertNotEqual(field, other) - - def test___eq___fields_mismatch(self): - sub1 = self._make_one("sub1", "STRING") - sub2 = self._make_one("sub2", "STRING") - field = self._make_one("test", "RECORD", fields=[sub1]) - other = self._make_one("test", "RECORD", fields=[sub2]) - self.assertNotEqual(field, other) - - def 
test___eq___hit(self): - field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") - other = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") - self.assertEqual(field, other) - - def test___eq___hit_case_diff_on_type(self): - field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") - other = self._make_one("test", "string", mode="REQUIRED", description="Testing") - self.assertEqual(field, other) - - def test___eq___hit_w_fields(self): - sub1 = self._make_one("sub1", "STRING") - sub2 = self._make_one("sub2", "STRING") - field = self._make_one("test", "RECORD", fields=[sub1, sub2]) - other = self._make_one("test", "RECORD", fields=[sub1, sub2]) - self.assertEqual(field, other) - - def test___ne___wrong_type(self): - field = self._make_one("toast", "INTEGER") - other = object() - self.assertNotEqual(field, other) - self.assertEqual(field, mock.ANY) - - def test___ne___same_value(self): - field1 = self._make_one("test", "TIMESTAMP", mode="REPEATED") - field2 = self._make_one("test", "TIMESTAMP", mode="REPEATED") - # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = field1 != field2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - field1 = self._make_one( - "test1", "FLOAT", mode="REPEATED", description="Not same" - ) - field2 = self._make_one( - "test2", "FLOAT", mode="NULLABLE", description="Knot saym" - ) - self.assertNotEqual(field1, field2) - - def test___hash__set_equality(self): - sub1 = self._make_one("sub1", "STRING") - sub2 = self._make_one("sub2", "STRING") - field1 = self._make_one("test", "RECORD", fields=[sub1]) - field2 = self._make_one("test", "RECORD", fields=[sub2]) - set_one = {field1, field2} - set_two = {field1, field2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - sub1 = self._make_one("sub1", "STRING") - sub2 = self._make_one("sub2", "STRING") - field1 = self._make_one("test", "RECORD", fields=[sub1]) - field2 = self._make_one("test", "RECORD", fields=[sub2]) - set_one = {field1} - set_two = {field2} - self.assertNotEqual(set_one, set_two) - - def test___repr__(self): - field1 = self._make_one("field1", "STRING") - expected = "SchemaField('field1', 'STRING', 'NULLABLE', None, ())" - self.assertEqual(repr(field1), expected) - - -# TODO: dedup with the same class in test_table.py. 
-class _SchemaBase(object): - def _verify_field(self, field, r_field): - self.assertEqual(field.name, r_field["name"]) - self.assertEqual(field.field_type, r_field["type"]) - self.assertEqual(field.mode, r_field.get("mode", "NULLABLE")) - - def _verifySchema(self, schema, resource): - r_fields = resource["schema"]["fields"] - self.assertEqual(len(schema), len(r_fields)) - - for field, r_field in zip(schema, r_fields): - self._verify_field(field, r_field) - - -class Test_parse_schema_resource(unittest.TestCase, _SchemaBase): - def _call_fut(self, resource): - from google.cloud.bigquery.schema import _parse_schema_resource - - return _parse_schema_resource(resource) - - def _make_resource(self): - return { - "schema": { - "fields": [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, - ] - } - } - - def test__parse_schema_resource_defaults(self): - RESOURCE = self._make_resource() - schema = self._call_fut(RESOURCE["schema"]) - self._verifySchema(schema, RESOURCE) - - def test__parse_schema_resource_subfields(self): - RESOURCE = self._make_resource() - RESOURCE["schema"]["fields"].append( - { - "name": "phone", - "type": "RECORD", - "mode": "REPEATED", - "fields": [ - {"name": "type", "type": "STRING", "mode": "REQUIRED"}, - {"name": "number", "type": "STRING", "mode": "REQUIRED"}, - ], - } - ) - schema = self._call_fut(RESOURCE["schema"]) - self._verifySchema(schema, RESOURCE) - - def test__parse_schema_resource_fields_without_mode(self): - RESOURCE = self._make_resource() - RESOURCE["schema"]["fields"].append({"name": "phone", "type": "STRING"}) - - schema = self._call_fut(RESOURCE["schema"]) - self._verifySchema(schema, RESOURCE) - - -class Test_build_schema_resource(unittest.TestCase, _SchemaBase): - def _call_fut(self, resource): - from google.cloud.bigquery.schema import _build_schema_resource - - return _build_schema_resource(resource) - - def test_defaults(self): - from google.cloud.bigquery.schema import SchemaField - - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - resource = self._call_fut([full_name, age]) - self.assertEqual(len(resource), 2) - self.assertEqual( - resource[0], - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - ) - self.assertEqual( - resource[1], - {"name": "age", "type": "INTEGER", "mode": "REQUIRED", "description": None}, - ) - - def test_w_description(self): - from google.cloud.bigquery.schema import SchemaField - - DESCRIPTION = "DESCRIPTION" - full_name = SchemaField( - "full_name", "STRING", mode="REQUIRED", description=DESCRIPTION - ) - age = SchemaField("age", "INTEGER", mode="REQUIRED") - resource = self._call_fut([full_name, age]) - self.assertEqual(len(resource), 2) - self.assertEqual( - resource[0], - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": DESCRIPTION, - }, - ) - self.assertEqual( - resource[1], - {"name": "age", "type": "INTEGER", "mode": "REQUIRED", "description": None}, - ) - - def test_w_subfields(self): - from google.cloud.bigquery.schema import SchemaField - - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - ph_type = SchemaField("type", "STRING", "REQUIRED") - ph_num = SchemaField("number", "STRING", "REQUIRED") - phone = SchemaField( - "phone", "RECORD", mode="REPEATED", fields=[ph_type, ph_num] - ) - resource = self._call_fut([full_name, phone]) - self.assertEqual(len(resource), 2) - 
self.assertEqual( - resource[0], - { - "name": "full_name", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - ) - self.assertEqual( - resource[1], - { - "name": "phone", - "type": "RECORD", - "mode": "REPEATED", - "description": None, - "fields": [ - { - "name": "type", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - { - "name": "number", - "type": "STRING", - "mode": "REQUIRED", - "description": None, - }, - ], - }, - ) - - -class Test_to_schema_fields(unittest.TestCase): - @staticmethod - def _call_fut(schema): - from google.cloud.bigquery.schema import _to_schema_fields - - return _to_schema_fields(schema) - - def test_invalid_type(self): - schema = [ - ("full_name", "STRING", "REQUIRED"), - ("address", "STRING", "REQUIRED"), - ] - with self.assertRaises(ValueError): - self._call_fut(schema) - - def test_schema_fields_sequence(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INT64", mode="NULLABLE"), - ] - result = self._call_fut(schema) - self.assertEqual(result, schema) - - def test_invalid_mapping_representation(self): - schema = [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "address", "typeooo": "STRING", "mode": "REQUIRED"}, - ] - with self.assertRaises(Exception): - self._call_fut(schema) - - def test_valid_mapping_representation(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - { - "name": "residence", - "type": "STRUCT", - "mode": "NULLABLE", - "fields": [ - {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, - {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, - ], - }, - ] - - expected_schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField( - "residence", - "STRUCT", - mode="NULLABLE", - fields=[ - SchemaField("foo", "DATE", mode="NULLABLE"), - SchemaField("bar", "BYTES", mode="REQUIRED"), - ], - ), - ] - - result = self._call_fut(schema) - self.assertEqual(result, expected_schema) diff --git a/bigquery/tests/unit/test_signature_compatibility.py b/bigquery/tests/unit/test_signature_compatibility.py deleted file mode 100644 index 6002ae3e87c9..000000000000 --- a/bigquery/tests/unit/test_signature_compatibility.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
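
The SchemaField class exercised by the deleted test_schema.py above is the same one used to declare a table's schema, including nested RECORD fields. A minimal sketch with placeholder project, dataset, and table IDs:

    from google.cloud import bigquery

    client = bigquery.Client()
    schema = [
        bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
        bigquery.SchemaField(
            "phone",
            "RECORD",
            mode="REPEATED",
            fields=[
                bigquery.SchemaField("type", "STRING", mode="REQUIRED"),
                bigquery.SchemaField("number", "STRING", mode="REQUIRED"),
            ],
        ),
    ]
    table = bigquery.Table("my-project.my_dataset.people", schema=schema)
    table = client.create_table(table)  # API request
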
- -import inspect - -import pytest - - -@pytest.fixture -def query_job_class(): - from google.cloud.bigquery.job import QueryJob - - return QueryJob - - -@pytest.fixture -def row_iterator_class(): - from google.cloud.bigquery.table import RowIterator - - return RowIterator - - -@pytest.mark.skipif( - not hasattr(inspect, "signature"), - reason="inspect.signature() is not available in older Python versions", -) -def test_to_arrow_method_signatures_match(query_job_class, row_iterator_class): - sig = inspect.signature(query_job_class.to_arrow) - sig2 = inspect.signature(row_iterator_class.to_arrow) - assert sig == sig2 - - -@pytest.mark.skipif( - not hasattr(inspect, "signature"), - reason="inspect.signature() is not available in older Python versions", -) -def test_to_dataframe_method_signatures_match(query_job_class, row_iterator_class): - sig = inspect.signature(query_job_class.to_dataframe) - sig2 = inspect.signature(row_iterator_class.to_dataframe) - assert sig == sig2 diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py deleted file mode 100644 index 079ec6e000d3..000000000000 --- a/bigquery/tests/unit/test_table.py +++ /dev/null @@ -1,3746 +0,0 @@ -# Copyright 2015 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
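The signature-compatibility tests above guard against QueryJob.to_arrow/to_dataframe drifting apart from their RowIterator counterparts. The technique is plain inspect.signature equality; a self-contained sketch with hypothetical stand-in functions (not part of the library):

    import inspect

    def to_arrow(self, progress_bar_type=None):  # hypothetical stand-in
        pass

    def to_arrow_twin(self, progress_bar_type=None):  # hypothetical stand-in
        pass

    # Signature objects compare parameter names, kinds, defaults, and
    # annotations, so any drift between the two methods fails the assert.
    assert inspect.signature(to_arrow) == inspect.signature(to_arrow_twin)
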
- -import itertools -import logging -import time -import unittest -import warnings - -import mock -import pytest -import six - -import google.api_core.exceptions - -try: - from google.cloud import bigquery_storage_v1beta1 - from google.cloud.bigquery_storage_v1beta1.gapic.transports import ( - big_query_storage_grpc_transport, - ) -except ImportError: # pragma: NO COVER - bigquery_storage_v1beta1 = None - big_query_storage_grpc_transport = None - -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None - -try: - import pyarrow - import pyarrow.types -except ImportError: # pragma: NO COVER - pyarrow = None - -try: - from tqdm import tqdm -except (ImportError, AttributeError): # pragma: NO COVER - tqdm = None - -from google.cloud.bigquery.dataset import DatasetReference - - -def _mock_client(): - from google.cloud.bigquery import client - - mock_client = mock.create_autospec(client.Client) - mock_client.project = "my-project" - return mock_client - - -class _SchemaBase(object): - def _verify_field(self, field, r_field): - self.assertEqual(field.name, r_field["name"]) - self.assertEqual(field.field_type, r_field["type"]) - self.assertEqual(field.mode, r_field.get("mode", "NULLABLE")) - - def _verifySchema(self, schema, resource): - r_fields = resource["schema"]["fields"] - self.assertEqual(len(schema), len(r_fields)) - - for field, r_field in zip(schema, r_fields): - self._verify_field(field, r_field) - - -class TestEncryptionConfiguration(unittest.TestCase): - KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.table import EncryptionConfiguration - - return EncryptionConfiguration - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - encryption_config = self._make_one() - self.assertIsNone(encryption_config.kms_key_name) - - def test_ctor_with_key(self): - encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) - self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - - -class TestTableReference(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.table import TableReference - - return TableReference - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset_ref = DatasetReference("project_1", "dataset_1") - - table_ref = self._make_one(dataset_ref, "table_1") - self.assertEqual(table_ref.dataset_id, dataset_ref.dataset_id) - self.assertEqual(table_ref.table_id, "table_1") - - def test_to_api_repr(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset_ref = DatasetReference("project_1", "dataset_1") - table_ref = self._make_one(dataset_ref, "table_1") - - resource = table_ref.to_api_repr() - - self.assertEqual( - resource, - {"projectId": "project_1", "datasetId": "dataset_1", "tableId": "table_1"}, - ) - - def test_from_api_repr(self): - from google.cloud.bigquery.dataset import DatasetReference - from google.cloud.bigquery.table import TableReference - - dataset_ref = DatasetReference("project_1", "dataset_1") - expected = self._make_one(dataset_ref, "table_1") - - got = TableReference.from_api_repr( - {"projectId": "project_1", "datasetId": "dataset_1", "tableId": "table_1"} - ) - - self.assertEqual(expected, got) - - def test_from_string(self): - cls = self._get_target_class() - 
got = cls.from_string("string-project.string_dataset.string_table") - self.assertEqual(got.project, "string-project") - self.assertEqual(got.dataset_id, "string_dataset") - self.assertEqual(got.table_id, "string_table") - - def test_from_string_w_prefix(self): - cls = self._get_target_class() - got = cls.from_string("google.com:string-project.string_dataset.string_table") - self.assertEqual(got.project, "google.com:string-project") - self.assertEqual(got.dataset_id, "string_dataset") - self.assertEqual(got.table_id, "string_table") - - def test_from_string_legacy_string(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string-project:string_dataset.string_table") - - def test_from_string_w_incorrect_prefix(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("google.com.string-project.string_dataset.string_table") - - def test_from_string_not_fully_qualified(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string_table") - - with self.assertRaises(ValueError): - cls.from_string("string_dataset.string_table") - - with self.assertRaises(ValueError): - cls.from_string("a.b.c.d") - - def test_from_string_with_default_project(self): - cls = self._get_target_class() - got = cls.from_string( - "string_dataset.string_table", default_project="default-project" - ) - self.assertEqual(got.project, "default-project") - self.assertEqual(got.dataset_id, "string_dataset") - self.assertEqual(got.table_id, "string_table") - - def test_from_string_ignores_default_project(self): - cls = self._get_target_class() - got = cls.from_string( - "string-project.string_dataset.string_table", - default_project="default-project", - ) - self.assertEqual(got.project, "string-project") - self.assertEqual(got.dataset_id, "string_dataset") - self.assertEqual(got.table_id, "string_table") - - def test___eq___wrong_type(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset_ref = DatasetReference("project_1", "dataset_1") - table = self._make_one(dataset_ref, "table_1") - other = object() - self.assertNotEqual(table, other) - self.assertEqual(table, mock.ANY) - - def test___eq___project_mismatch(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - other_dataset = DatasetReference("project_2", "dataset_1") - table = self._make_one(dataset, "table_1") - other = self._make_one(other_dataset, "table_1") - self.assertNotEqual(table, other) - - def test___eq___dataset_mismatch(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - other_dataset = DatasetReference("project_1", "dataset_2") - table = self._make_one(dataset, "table_1") - other = self._make_one(other_dataset, "table_1") - self.assertNotEqual(table, other) - - def test___eq___table_mismatch(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - table = self._make_one(dataset, "table_1") - other = self._make_one(dataset, "table_2") - self.assertNotEqual(table, other) - - def test___eq___equality(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - table = self._make_one(dataset, "table_1") - other = self._make_one(dataset, "table_1") - self.assertEqual(table, other) - - def test___hash__set_equality(self): - from 
google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - table1 = self._make_one(dataset, "table1") - table2 = self._make_one(dataset, "table2") - set_one = {table1, table2} - set_two = {table1, table2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - from google.cloud.bigquery.dataset import DatasetReference - - dataset = DatasetReference("project_1", "dataset_1") - table1 = self._make_one(dataset, "table1") - table2 = self._make_one(dataset, "table2") - set_one = {table1} - set_two = {table2} - self.assertNotEqual(set_one, set_two) - - def test___repr__(self): - dataset = DatasetReference("project1", "dataset1") - table1 = self._make_one(dataset, "table1") - expected = ( - "TableReference(DatasetReference('project1', 'dataset1'), " "'table1')" - ) - self.assertEqual(repr(table1), expected) - - -class TestTable(unittest.TestCase, _SchemaBase): - - PROJECT = "prahj-ekt" - DS_ID = "dataset-name" - TABLE_NAME = "table-name" - KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" - - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.table import Table - - return Table - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _setUpConstants(self): - import datetime - from google.cloud._helpers import UTC - - self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) - self.ETAG = "ETAG" - self.TABLE_FULL_ID = "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME) - self.RESOURCE_URL = "http://example.com/path/to/resource" - self.NUM_BYTES = 12345 - self.NUM_ROWS = 67 - self.NUM_EST_BYTES = 1234 - self.NUM_EST_ROWS = 23 - - def _make_resource(self): - self._setUpConstants() - return { - "creationTime": self.WHEN_TS * 1000, - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_NAME, - }, - "schema": { - "fields": [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, - ] - }, - "etag": "ETAG", - "id": self.TABLE_FULL_ID, - "lastModifiedTime": self.WHEN_TS * 1000, - "location": "US", - "selfLink": self.RESOURCE_URL, - "numRows": self.NUM_ROWS, - "numBytes": self.NUM_BYTES, - "type": "TABLE", - "streamingBuffer": { - "estimatedRows": str(self.NUM_EST_ROWS), - "estimatedBytes": str(self.NUM_EST_BYTES), - "oldestEntryTime": self.WHEN_TS * 1000, - }, - "externalDataConfiguration": { - "sourceFormat": "CSV", - "csvOptions": {"allowJaggedRows": True, "encoding": "encoding"}, - }, - "labels": {"x": "y"}, - } - - def _verifyReadonlyResourceProperties(self, table, resource): - if "creationTime" in resource: - self.assertEqual(table.created, self.WHEN) - else: - self.assertIsNone(table.created) - - if "etag" in resource: - self.assertEqual(table.etag, self.ETAG) - else: - self.assertIsNone(table.etag) - - if "numRows" in resource: - self.assertEqual(table.num_rows, self.NUM_ROWS) - else: - self.assertIsNone(table.num_rows) - - if "numBytes" in resource: - self.assertEqual(table.num_bytes, self.NUM_BYTES) - else: - self.assertIsNone(table.num_bytes) - - if "selfLink" in resource: - self.assertEqual(table.self_link, self.RESOURCE_URL) - else: - self.assertIsNone(table.self_link) - - if "streamingBuffer" in resource: - self.assertEqual(table.streaming_buffer.estimated_rows, self.NUM_EST_ROWS) - self.assertEqual(table.streaming_buffer.estimated_bytes, self.NUM_EST_BYTES) - 
self.assertEqual(table.streaming_buffer.oldest_entry_time, self.WHEN) - else: - self.assertIsNone(table.streaming_buffer) - - self.assertEqual(table.full_table_id, self.TABLE_FULL_ID) - self.assertEqual( - table.table_type, "TABLE" if "view" not in resource else "VIEW" - ) - - def _verifyResourceProperties(self, table, resource): - - self._verifyReadonlyResourceProperties(table, resource) - - if "expirationTime" in resource: - self.assertEqual(table.expires, self.EXP_TIME) - else: - self.assertIsNone(table.expires) - - self.assertEqual(table.description, resource.get("description")) - self.assertEqual(table.friendly_name, resource.get("friendlyName")) - self.assertEqual(table.location, resource.get("location")) - - if "view" in resource: - self.assertEqual(table.view_query, resource["view"]["query"]) - self.assertEqual( - table.view_use_legacy_sql, resource["view"].get("useLegacySql", True) - ) - else: - self.assertIsNone(table.view_query) - self.assertIsNone(table.view_use_legacy_sql) - - if "schema" in resource: - self._verifySchema(table.schema, resource) - else: - self.assertEqual(table.schema, []) - - if "externalDataConfiguration" in resource: - edc = table.external_data_configuration - self.assertEqual(edc.source_format, "CSV") - self.assertEqual(edc.options.allow_jagged_rows, True) - - if "labels" in resource: - self.assertEqual(table.labels, {"x": "y"}) - else: - self.assertEqual(table.labels, {}) - - if "encryptionConfiguration" in resource: - self.assertIsNotNone(table.encryption_configuration) - self.assertEqual( - table.encryption_configuration.kms_key_name, - resource["encryptionConfiguration"]["kmsKeyName"], - ) - else: - self.assertIsNone(table.encryption_configuration) - - def test_ctor(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - self.assertEqual(table.table_id, self.TABLE_NAME) - self.assertEqual(table.project, self.PROJECT) - self.assertEqual(table.dataset_id, self.DS_ID) - self.assertEqual(table.reference.table_id, self.TABLE_NAME) - self.assertEqual(table.reference.project, self.PROJECT) - self.assertEqual(table.reference.dataset_id, self.DS_ID) - self.assertEqual( - table.path, - "/projects/%s/datasets/%s/tables/%s" - % (self.PROJECT, self.DS_ID, self.TABLE_NAME), - ) - self.assertEqual(table.schema, []) - - self.assertIsNone(table.created) - self.assertIsNone(table.etag) - self.assertIsNone(table.modified) - self.assertIsNone(table.num_bytes) - self.assertIsNone(table.num_rows) - self.assertIsNone(table.self_link) - self.assertIsNone(table.full_table_id) - self.assertIsNone(table.table_type) - self.assertIsNone(table.description) - self.assertIsNone(table.expires) - self.assertIsNone(table.friendly_name) - self.assertIsNone(table.location) - self.assertIsNone(table.view_query) - self.assertIsNone(table.view_use_legacy_sql) - self.assertIsNone(table.external_data_configuration) - self.assertEqual(table.labels, {}) - self.assertIsNone(table.encryption_configuration) - self.assertIsNone(table.time_partitioning) - self.assertIsNone(table.clustering_fields) - - def test_ctor_w_schema(self): - from google.cloud.bigquery.schema import SchemaField - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - table = self._make_one(table_ref, schema=[full_name, age]) - - self.assertEqual(table.schema, [full_name, 
age]) - - def test_ctor_string(self): - table = self._make_one("some-project.some_dset.some_tbl") - self.assertEqual(table.project, "some-project") - self.assertEqual(table.dataset_id, "some_dset") - self.assertEqual(table.table_id, "some_tbl") - - def test_ctor_tablelistitem(self): - from google.cloud.bigquery.table import Table, TableListItem - - import datetime - from google.cloud._helpers import _millis, UTC - - self.WHEN_TS = 1437767599.125 - self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) - - project = "test-project" - dataset_id = "test_dataset" - table_id = "coffee_table" - resource = { - "creationTime": self.WHEN_TS * 1000, - "expirationTime": _millis(self.EXP_TIME), - "kind": "bigquery#table", - "id": "{}:{}.{}".format(project, dataset_id, table_id), - "tableReference": { - "projectId": project, - "datasetId": dataset_id, - "tableId": table_id, - }, - "friendlyName": "Mahogany Coffee Table", - "type": "TABLE", - "timePartitioning": { - "type": "DAY", - "field": "mycolumn", - "expirationMs": "10000", - }, - "labels": {"some-stuff": "this-is-a-label"}, - "clustering": {"fields": ["string"]}, - } - - table_list_item = TableListItem(resource) - table = Table(table_list_item) - - self.assertIsNone(table.created) - self.assertEqual(table.reference.project, project) - self.assertEqual(table.reference.dataset_id, dataset_id) - self.assertEqual(table.reference.table_id, table_id) - - def test_ctor_string_wo_project_id(self): - with pytest.raises(ValueError): - # Project ID is missing. - self._make_one("some_dset.some_tbl") - - def test_num_bytes_getter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - # Check with no value set. - self.assertIsNone(table.num_bytes) - - num_bytes = 1337 - # Check with integer value set. - table._properties = {"numBytes": num_bytes} - self.assertEqual(table.num_bytes, num_bytes) - - # Check with a string value set. - table._properties = {"numBytes": str(num_bytes)} - self.assertEqual(table.num_bytes, num_bytes) - - # Check with invalid int value. - table._properties = {"numBytes": "x"} - with self.assertRaises(ValueError): - getattr(table, "num_bytes") - - def test_num_rows_getter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - # Check with no value set. - self.assertIsNone(table.num_rows) - - num_rows = 42 - # Check with integer value set. - table._properties = {"numRows": num_rows} - self.assertEqual(table.num_rows, num_rows) - - # Check with a string value set. - table._properties = {"numRows": str(num_rows)} - self.assertEqual(table.num_rows, num_rows) - - # Check with invalid int value. 
- table._properties = {"numRows": "x"} - with self.assertRaises(ValueError): - getattr(table, "num_rows") - - def test_schema_setter_non_sequence(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(TypeError): - table.schema = object() - - def test_schema_setter_invalid_field(self): - from google.cloud.bigquery.schema import SchemaField - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - with self.assertRaises(ValueError): - table.schema = [full_name, object()] - - def test_schema_setter_valid_fields(self): - from google.cloud.bigquery.schema import SchemaField - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - table.schema = [full_name, age] - self.assertEqual(table.schema, [full_name, age]) - - def test_schema_setter_invalid_mapping_representation(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} - invalid_field = {"name": "full_name", "typeooo": "STRING", "mode": "REQUIRED"} - with self.assertRaises(Exception): - table.schema = [full_name, invalid_field] - - def test_schema_setter_valid_mapping_representation(self): - from google.cloud.bigquery.schema import SchemaField - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} - job_status = { - "name": "is_employed", - "type": "STRUCT", - "mode": "NULLABLE", - "fields": [ - {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, - {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, - ], - } - - table.schema = [full_name, job_status] - - expected_schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField( - "is_employed", - "STRUCT", - mode="NULLABLE", - fields=[ - SchemaField("foo", "DATE", mode="NULLABLE"), - SchemaField("bar", "BYTES", mode="REQUIRED"), - ], - ), - ] - self.assertEqual(table.schema, expected_schema) - - def test_props_set_by_server(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - - CREATED = datetime.datetime(2015, 7, 29, 12, 13, 22, tzinfo=UTC) - MODIFIED = datetime.datetime(2015, 7, 29, 14, 47, 15, tzinfo=UTC) - TABLE_FULL_ID = "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME) - URL = "http://example.com/projects/%s/datasets/%s/tables/%s" % ( - self.PROJECT, - self.DS_ID, - self.TABLE_NAME, - ) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table._properties["creationTime"] = _millis(CREATED) - table._properties["etag"] = "ETAG" - table._properties["lastModifiedTime"] = _millis(MODIFIED) - table._properties["numBytes"] = 12345 - table._properties["numRows"] = 66 - table._properties["selfLink"] = URL - table._properties["id"] = TABLE_FULL_ID - table._properties["type"] = "TABLE" - - self.assertEqual(table.created, CREATED) - self.assertEqual(table.etag, 
"ETAG") - self.assertEqual(table.modified, MODIFIED) - self.assertEqual(table.num_bytes, 12345) - self.assertEqual(table.num_rows, 66) - self.assertEqual(table.self_link, URL) - self.assertEqual(table.full_table_id, TABLE_FULL_ID) - self.assertEqual(table.table_type, "TABLE") - - def test_description_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.description = 12345 - - def test_description_setter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.description = "DESCRIPTION" - self.assertEqual(table.description, "DESCRIPTION") - - def test_expires_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.expires = object() - - def test_expires_setter(self): - import datetime - from google.cloud._helpers import UTC - - WHEN = datetime.datetime(2015, 7, 28, 16, 39, tzinfo=UTC) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.expires = WHEN - self.assertEqual(table.expires, WHEN) - - def test_friendly_name_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.friendly_name = 12345 - - def test_friendly_name_setter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.friendly_name = "FRIENDLY" - self.assertEqual(table.friendly_name, "FRIENDLY") - - def test_view_query_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.view_query = 12345 - - def test_view_query_setter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.view_query = "select * from foo" - self.assertEqual(table.view_query, "select * from foo") - self.assertEqual(table.view_use_legacy_sql, False) - - table.view_use_legacy_sql = True - self.assertEqual(table.view_use_legacy_sql, True) - - def test_view_query_deleter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.view_query = "select * from foo" - del table.view_query - self.assertIsNone(table.view_query) - self.assertIsNone(table.view_use_legacy_sql) - - def test_view_use_legacy_sql_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.view_use_legacy_sql = 12345 - - def test_view_use_legacy_sql_setter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.view_use_legacy_sql = True - table.view_query = "select * from foo" - self.assertEqual(table.view_use_legacy_sql, True) - self.assertEqual(table.view_query, "select * from foo") - - def 
test_external_data_configuration_setter(self): - from google.cloud.bigquery.external_config import ExternalConfig - - external_config = ExternalConfig("CSV") - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table.external_data_configuration = external_config - - self.assertEqual( - table.external_data_configuration.source_format, - external_config.source_format, - ) - - def test_external_data_configuration_setter_none(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table.external_data_configuration = None - - self.assertIsNone(table.external_data_configuration) - - def test_external_data_configuration_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.external_data_configuration = 12345 - - def test_labels_update_in_place(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - del table._properties["labels"] # don't start w/ existing dict - labels = table.labels - labels["foo"] = "bar" # update in place - self.assertEqual(table.labels, {"foo": "bar"}) - - def test_labels_setter_bad_value(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - with self.assertRaises(ValueError): - table.labels = 12345 - - def test_from_string(self): - cls = self._get_target_class() - got = cls.from_string("string-project.string_dataset.string_table") - self.assertEqual(got.project, "string-project") - self.assertEqual(got.dataset_id, "string_dataset") - self.assertEqual(got.table_id, "string_table") - - def test_from_string_legacy_string(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string-project:string_dataset.string_table") - - def test_from_string_not_fully_qualified(self): - cls = self._get_target_class() - with self.assertRaises(ValueError): - cls.from_string("string_dataset.string_table") - - def test_from_api_repr_missing_identity(self): - self._setUpConstants() - RESOURCE = {} - klass = self._get_target_class() - with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE) - - def test_from_api_repr_bare(self): - self._setUpConstants() - RESOURCE = { - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME), - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_NAME, - }, - "type": "TABLE", - } - klass = self._get_target_class() - table = klass.from_api_repr(RESOURCE) - self.assertEqual(table.table_id, self.TABLE_NAME) - self._verifyResourceProperties(table, RESOURCE) - - def test_from_api_repr_w_properties(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - - RESOURCE = self._make_resource() - RESOURCE["view"] = {"query": "select fullname, age from person_ages"} - RESOURCE["type"] = "VIEW" - RESOURCE["location"] = "EU" - self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) - RESOURCE["expirationTime"] = _millis(self.EXP_TIME) - klass = self._get_target_class() - table = klass.from_api_repr(RESOURCE) - self._verifyResourceProperties(table, RESOURCE) - - def test_from_api_with_encryption(self): - 
self._setUpConstants() - RESOURCE = { - "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME), - "tableReference": { - "projectId": self.PROJECT, - "datasetId": self.DS_ID, - "tableId": self.TABLE_NAME, - }, - "encryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, - "type": "TABLE", - } - klass = self._get_target_class() - table = klass.from_api_repr(RESOURCE) - self._verifyResourceProperties(table, RESOURCE) - - def test_to_api_repr_w_custom_field(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table._properties["newAlphaProperty"] = "unreleased property" - resource = table.to_api_repr() - - exp_resource = { - "tableReference": table_ref.to_api_repr(), - "labels": {}, - "newAlphaProperty": "unreleased property", - } - self.assertEqual(resource, exp_resource) - - def test__build_resource_w_custom_field(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table._properties["newAlphaProperty"] = "unreleased property" - resource = table._build_resource(["newAlphaProperty"]) - - exp_resource = {"newAlphaProperty": "unreleased property"} - self.assertEqual(resource, exp_resource) - - def test__build_resource_w_custom_field_not_in__properties(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table = self._make_one(dataset.table(self.TABLE_NAME)) - table.bad = "value" - with self.assertRaises(ValueError): - table._build_resource(["bad"]) - - def test_range_partitioning(self): - from google.cloud.bigquery.table import RangePartitioning - from google.cloud.bigquery.table import PartitionRange - - table = self._make_one("proj.dset.tbl") - assert table.range_partitioning is None - - table.range_partitioning = RangePartitioning( - field="col1", range_=PartitionRange(start=-512, end=1024, interval=128) - ) - assert table.range_partitioning.field == "col1" - assert table.range_partitioning.range_.start == -512 - assert table.range_partitioning.range_.end == 1024 - assert table.range_partitioning.range_.interval == 128 - - table.range_partitioning = None - assert table.range_partitioning is None - - def test_range_partitioning_w_wrong_type(self): - object_under_test = self._make_one("proj.dset.tbl") - with pytest.raises(ValueError, match="RangePartitioning"): - object_under_test.range_partitioning = object() - - def test_require_partitioning_filter(self): - table = self._make_one("proj.dset.tbl") - assert table.require_partition_filter is None - table.require_partition_filter = True - assert table.require_partition_filter - table.require_partition_filter = False - assert table.require_partition_filter is not None - assert not table.require_partition_filter - table.require_partition_filter = None - assert table.require_partition_filter is None - - def test_time_partitioning_getter(self): - from google.cloud.bigquery.table import TimePartitioning - from google.cloud.bigquery.table import TimePartitioningType - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table._properties["timePartitioning"] = { - "type": "DAY", - "field": "col1", - "expirationMs": "123456", - "requirePartitionFilter": False, - } - self.assertIsInstance(table.time_partitioning, TimePartitioning) - self.assertEqual(table.time_partitioning.type_, TimePartitioningType.DAY) - self.assertEqual(table.time_partitioning.field, "col1") - 
self.assertEqual(table.time_partitioning.expiration_ms, 123456) - - with warnings.catch_warnings(record=True) as warned: - self.assertFalse(table.time_partitioning.require_partition_filter) - - assert len(warned) == 1 - self.assertIs(warned[0].category, PendingDeprecationWarning) - - def test_time_partitioning_getter_w_none(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table._properties["timePartitioning"] = None - self.assertIsNone(table.time_partitioning) - - del table._properties["timePartitioning"] - self.assertIsNone(table.time_partitioning) - - def test_time_partitioning_getter_w_empty(self): - from google.cloud.bigquery.table import TimePartitioning - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - # Even though there are required properties according to the API - # specification, sometimes time partitioning is populated as an empty - # object. See internal bug 131167013. - table._properties["timePartitioning"] = {} - self.assertIsInstance(table.time_partitioning, TimePartitioning) - self.assertIsNone(table.time_partitioning.type_) - self.assertIsNone(table.time_partitioning.field) - self.assertIsNone(table.time_partitioning.expiration_ms) - - with warnings.catch_warnings(record=True) as warned: - self.assertIsNone(table.time_partitioning.require_partition_filter) - - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_time_partitioning_setter(self): - from google.cloud.bigquery.table import TimePartitioning - from google.cloud.bigquery.table import TimePartitioningType - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - time_partitioning = TimePartitioning(type_=TimePartitioningType.DAY) - - table.time_partitioning = time_partitioning - - self.assertEqual(table.time_partitioning.type_, TimePartitioningType.DAY) - # Both objects point to the same properties dict - self.assertIs( - table._properties["timePartitioning"], time_partitioning._properties - ) - - time_partitioning.expiration_ms = 10000 - - # Changes to TimePartitioning object are reflected in Table properties - self.assertEqual( - table.time_partitioning.expiration_ms, time_partitioning.expiration_ms - ) - - def test_time_partitioning_setter_bad_type(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - with self.assertRaises(ValueError): - table.time_partitioning = {"timePartitioning": {"type": "DAY"}} - - def test_time_partitioning_setter_none(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table.time_partitioning = None - - self.assertIsNone(table.time_partitioning) - - def test_partitioning_type_setter(self): - from google.cloud.bigquery.table import TimePartitioningType - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - with warnings.catch_warnings(record=True) as warned: - self.assertIsNone(table.partitioning_type) - - table.partitioning_type = TimePartitioningType.DAY - - self.assertEqual(table.partitioning_type, "DAY") - - self.assertEqual(len(warned), 3) - for warning in warned: - self.assertIs(warning.category, 
PendingDeprecationWarning) - - def test_partitioning_type_setter_w_time_partitioning_set(self): - from google.cloud.bigquery.table import TimePartitioning - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.time_partitioning = TimePartitioning() - - with warnings.catch_warnings(record=True) as warned: - table.partitioning_type = "NEW_FAKE_TYPE" - - self.assertEqual(table.partitioning_type, "NEW_FAKE_TYPE") - - self.assertEqual(len(warned), 2) - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_partitioning_expiration_setter_w_time_partitioning_set(self): - from google.cloud.bigquery.table import TimePartitioning - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - table.time_partitioning = TimePartitioning() - - with warnings.catch_warnings(record=True) as warned: - table.partition_expiration = 100000 - - self.assertEqual(table.partition_expiration, 100000) - - self.assertEqual(len(warned), 2) - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_partition_expiration_setter(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - with warnings.catch_warnings(record=True) as warned: - self.assertIsNone(table.partition_expiration) - - table.partition_expiration = 100 - - self.assertEqual(table.partition_expiration, 100) - # defaults to 'DAY' when expiration is set and type is not set - self.assertEqual(table.partitioning_type, "DAY") - - self.assertEqual(len(warned), 4) - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_clustering_fields_setter_w_fields(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - fields = ["email", "phone"] - - table.clustering_fields = fields - self.assertEqual(table.clustering_fields, fields) - self.assertEqual(table._properties["clustering"], {"fields": fields}) - - def test_clustering_fields_setter_w_none(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - fields = ["email", "phone"] - - table._properties["clustering"] = {"fields": fields} - table.clustering_fields = None - self.assertEqual(table.clustering_fields, None) - self.assertFalse("clustering" in table._properties) - - def test_clustering_fields_setter_w_none_noop(self): - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - - table.clustering_fields = None - self.assertEqual(table.clustering_fields, None) - self.assertFalse("clustering" in table._properties) - - def test_encryption_configuration_setter(self): - # Previously, the EncryptionConfiguration class was in the table module, not the - # encryption_configuration module. It was moved to support encryption on models. - # This test imports from the table module to ensure that the previous location - # continues to function as an alias.
- from google.cloud.bigquery.table import EncryptionConfiguration - - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref) - encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME - ) - table.encryption_configuration = encryption_configuration - self.assertEqual(table.encryption_configuration.kms_key_name, self.KMS_KEY_NAME) - table.encryption_configuration = None - self.assertIsNone(table.encryption_configuration) - - def test___repr__(self): - from google.cloud.bigquery.table import TableReference - - dataset = DatasetReference("project1", "dataset1") - table1 = self._make_one(TableReference(dataset, "table1")) - expected = ( - "Table(TableReference(" - "DatasetReference('project1', 'dataset1'), " - "'table1'))" - ) - self.assertEqual(repr(table1), expected) - - -class Test_row_from_mapping(unittest.TestCase, _SchemaBase): - - PROJECT = "prahj-ekt" - DS_ID = "dataset-name" - TABLE_NAME = "table-name" - - def _call_fut(self, mapping, schema): - from google.cloud.bigquery.table import _row_from_mapping - - return _row_from_mapping(mapping, schema) - - def test__row_from_mapping_wo_schema(self): - from google.cloud.bigquery.table import Table, _TABLE_HAS_NO_SCHEMA - - MAPPING = {"full_name": "Phred Phlyntstone", "age": 32} - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = Table(table_ref) - - with self.assertRaises(ValueError) as exc: - self._call_fut(MAPPING, table.schema) - - self.assertEqual(exc.exception.args, (_TABLE_HAS_NO_SCHEMA,)) - - def test__row_from_mapping_w_invalid_schema(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - MAPPING = { - "full_name": "Phred Phlyntstone", - "age": 32, - "colors": ["red", "green"], - "bogus": "WHATEVER", - } - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - colors = SchemaField("colors", "DATETIME", mode="REPEATED") - bogus = SchemaField("joined", "STRING", mode="BOGUS") - table = Table(table_ref, schema=[full_name, age, colors, bogus]) - - with self.assertRaises(ValueError) as exc: - self._call_fut(MAPPING, table.schema) - - self.assertIn("Unknown field mode: BOGUS", str(exc.exception)) - - def test__row_from_mapping_w_schema(self): - from google.cloud.bigquery.schema import SchemaField - from google.cloud.bigquery.table import Table - - MAPPING = { - "full_name": "Phred Phlyntstone", - "age": 32, - "colors": ["red", "green"], - "extra": "IGNORED", - } - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField("full_name", "STRING", mode="REQUIRED") - age = SchemaField("age", "INTEGER", mode="REQUIRED") - colors = SchemaField("colors", "DATETIME", mode="REPEATED") - joined = SchemaField("joined", "STRING", mode="NULLABLE") - table = Table(table_ref, schema=[full_name, age, colors, joined]) - - self.assertEqual( - self._call_fut(MAPPING, table.schema), - ("Phred Phlyntstone", 32, ["red", "green"], None), - ) - - -class TestTableListItem(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigquery.table import TableListItem - - return TableListItem - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def 
_setUpConstants(self): - import datetime - from google.cloud._helpers import UTC - - self.WHEN_TS = 1437767599.125 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) - self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) - - def test_ctor(self): - from google.cloud._helpers import _millis - - self._setUpConstants() - project = "test-project" - dataset_id = "test_dataset" - table_id = "coffee_table" - resource = { - "creationTime": self.WHEN_TS * 1000, - "expirationTime": _millis(self.EXP_TIME), - "kind": "bigquery#table", - "id": "{}:{}.{}".format(project, dataset_id, table_id), - "tableReference": { - "projectId": project, - "datasetId": dataset_id, - "tableId": table_id, - }, - "friendlyName": "Mahogany Coffee Table", - "type": "TABLE", - "timePartitioning": { - "type": "DAY", - "field": "mycolumn", - "expirationMs": "10000", - }, - "labels": {"some-stuff": "this-is-a-label"}, - "clustering": {"fields": ["string"]}, - } - - table = self._make_one(resource) - - self.assertEqual(table.created, self.WHEN) - self.assertEqual(table.expires, self.EXP_TIME) - self.assertEqual(table.project, project) - self.assertEqual(table.dataset_id, dataset_id) - self.assertEqual(table.table_id, table_id) - self.assertEqual( - table.full_table_id, "{}:{}.{}".format(project, dataset_id, table_id) - ) - self.assertEqual(table.reference.project, project) - self.assertEqual(table.reference.dataset_id, dataset_id) - self.assertEqual(table.reference.table_id, table_id) - self.assertEqual(table.friendly_name, "Mahogany Coffee Table") - self.assertEqual(table.table_type, "TABLE") - self.assertEqual(table.time_partitioning.type_, "DAY") - self.assertEqual(table.time_partitioning.expiration_ms, 10000) - self.assertEqual(table.time_partitioning.field, "mycolumn") - self.assertEqual(table.labels["some-stuff"], "this-is-a-label") - self.assertIsNone(table.view_use_legacy_sql) - self.assertEqual(table.clustering_fields, ["string"]) - - with warnings.catch_warnings(record=True) as warned: - self.assertEqual(table.partitioning_type, "DAY") - self.assertEqual(table.partition_expiration, 10000) - - self.assertEqual(len(warned), 2) - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_ctor_view(self): - project = "test-project" - dataset_id = "test_dataset" - table_id = "just_looking" - resource = { - "kind": "bigquery#table", - "id": "{}:{}.{}".format(project, dataset_id, table_id), - "tableReference": { - "projectId": project, - "datasetId": dataset_id, - "tableId": table_id, - }, - "type": "VIEW", - } - - table = self._make_one(resource) - self.assertEqual(table.project, project) - self.assertEqual(table.dataset_id, dataset_id) - self.assertEqual(table.table_id, table_id) - self.assertEqual( - table.full_table_id, "{}:{}.{}".format(project, dataset_id, table_id) - ) - self.assertEqual(table.reference.project, project) - self.assertEqual(table.reference.dataset_id, dataset_id) - self.assertEqual(table.reference.table_id, table_id) - self.assertEqual(table.table_type, "VIEW") - # Server default for useLegacySql is True. 
- self.assertTrue(table.view_use_legacy_sql) - - def test_ctor_missing_properties(self): - resource = { - "tableReference": { - "projectId": "testproject", - "datasetId": "testdataset", - "tableId": "testtable", - } - } - table = self._make_one(resource) - self.assertEqual(table.project, "testproject") - self.assertEqual(table.dataset_id, "testdataset") - self.assertEqual(table.table_id, "testtable") - self.assertIsNone(table.created) - self.assertIsNone(table.expires) - self.assertIsNone(table.clustering_fields) - self.assertIsNone(table.full_table_id) - self.assertIsNone(table.friendly_name) - self.assertIsNone(table.table_type) - self.assertIsNone(table.time_partitioning) - self.assertEqual(table.labels, {}) - self.assertIsNone(table.view_use_legacy_sql) - - with warnings.catch_warnings(record=True) as warned: - self.assertIsNone(table.partitioning_type) - self.assertIsNone(table.partition_expiration) - - self.assertEqual(len(warned), 2) - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_ctor_wo_project(self): - resource = { - "tableReference": {"datasetId": "testdataset", "tableId": "testtable"} - } - with self.assertRaises(ValueError): - self._make_one(resource) - - def test_ctor_wo_dataset(self): - resource = { - "tableReference": {"projectId": "testproject", "tableId": "testtable"} - } - with self.assertRaises(ValueError): - self._make_one(resource) - - def test_ctor_wo_table(self): - resource = { - "tableReference": {"projectId": "testproject", "datasetId": "testdataset"} - } - with self.assertRaises(ValueError): - self._make_one(resource) - - def test_ctor_wo_reference(self): - with self.assertRaises(ValueError): - self._make_one({}) - - def test_labels_update_in_place(self): - resource = { - "tableReference": { - "projectId": "testproject", - "datasetId": "testdataset", - "tableId": "testtable", - } - } - table = self._make_one(resource) - labels = table.labels - labels["foo"] = "bar" # update in place - self.assertEqual(table.labels, {"foo": "bar"}) - - -class TestRow(unittest.TestCase): - def test_row(self): - from google.cloud.bigquery.table import Row - - VALUES = (1, 2, 3) - row = Row(VALUES, {"a": 0, "b": 1, "c": 2}) - self.assertEqual(row.a, 1) - self.assertEqual(row[1], 2) - self.assertEqual(row["c"], 3) - self.assertEqual(len(row), 3) - self.assertEqual(row.values(), VALUES) - self.assertEqual(set(row.keys()), set({"a": 1, "b": 2, "c": 3}.keys())) - self.assertEqual(set(row.items()), set({"a": 1, "b": 2, "c": 3}.items())) - self.assertEqual(row.get("a"), 1) - self.assertEqual(row.get("d"), None) - self.assertEqual(row.get("d", ""), "") - self.assertEqual(row.get("d", default=""), "") - self.assertEqual(repr(row), "Row((1, 2, 3), {'a': 0, 'b': 1, 'c': 2})") - self.assertFalse(row != row) - self.assertFalse(row == 3) - with self.assertRaises(AttributeError): - row.z - with self.assertRaises(KeyError): - row["z"] - - -class Test_EmptyRowIterator(unittest.TestCase): - def _make_one(self): - from google.cloud.bigquery.table import _EmptyRowIterator - - return _EmptyRowIterator() - - def test_total_rows_eq_zero(self): - row_iterator = self._make_one() - self.assertEqual(row_iterator.total_rows, 0) - - @mock.patch("google.cloud.bigquery.table.pyarrow", new=None) - def test_to_arrow_error_if_pyarrow_is_none(self): - row_iterator = self._make_one() - with self.assertRaises(ValueError): - row_iterator.to_arrow() - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow(self): - row_iterator = self._make_one() - 
tbl = row_iterator.to_arrow() - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 0) - - @mock.patch("google.cloud.bigquery.table.pandas", new=None) - def test_to_dataframe_error_if_pandas_is_none(self): - row_iterator = self._make_one() - with self.assertRaises(ValueError): - row_iterator.to_dataframe() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe(self): - row_iterator = self._make_one() - df = row_iterator.to_dataframe() - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 0) # verify the number of rows - - -class TestRowIterator(unittest.TestCase): - def _class_under_test(self): - from google.cloud.bigquery.table import RowIterator - - return RowIterator - - def _make_one( - self, - client=None, - api_request=None, - path=None, - schema=None, - table=None, - **kwargs - ): - from google.cloud.bigquery.table import TableReference - - if client is None: - client = _mock_client() - - if api_request is None: - api_request = mock.sentinel.api_request - - if path is None: - path = "/foo" - - if schema is None: - schema = [] - - if table is None: - table = TableReference.from_string("my-project.my_dataset.my_table") - - return self._class_under_test()( - client, api_request, path, schema, table=table, **kwargs - ) - - def test_constructor(self): - from google.cloud.bigquery.table import _item_to_row - from google.cloud.bigquery.table import _rows_page_start - - client = _mock_client() - path = "/some/path" - iterator = self._make_one(client=client, path=path) - - # Objects are set without copying. - self.assertIs(iterator.client, client) - self.assertIs(iterator.item_to_value, _item_to_row) - self.assertIs(iterator._page_start, _rows_page_start) - # Properties have the expected value. - self.assertEqual(iterator.extra_params, {}) - self.assertEqual(iterator._items_key, "rows") - self.assertIsNone(iterator.max_results) - self.assertEqual(iterator.path, path) - self.assertFalse(iterator._started) - self.assertIsNone(iterator.total_rows) - # Changing attributes.
- self.assertEqual(iterator.page_number, 0) - self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.num_results, 0) - - def test_constructor_with_table(self): - from google.cloud.bigquery.table import Table - - table = Table("proj.dset.tbl") - table._properties["numRows"] = 100 - - iterator = self._make_one(table=table) - - self.assertIs(iterator._table, table) - self.assertEqual(iterator.total_rows, 100) - - def test_constructor_with_dict_schema(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "age", "type": "INT64", "mode": "NULLABLE"}, - ] - - iterator = self._make_one(schema=schema) - - expected_schema = [ - SchemaField("full_name", "STRING", mode="REQUIRED"), - SchemaField("age", "INT64", mode="NULLABLE"), - ] - self.assertEqual(iterator.schema, expected_schema) - - def test_iterate(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - self.assertEqual(row_iterator.num_results, 0) - - rows_iter = iter(row_iterator) - - val1 = six.next(rows_iter) - self.assertEqual(val1.name, "Phred Phlyntstone") - self.assertEqual(row_iterator.num_results, 1) - - val2 = six.next(rows_iter) - self.assertEqual(val2.name, "Bharney Rhubble") - self.assertEqual(row_iterator.num_results, 2) - - with self.assertRaises(StopIteration): - six.next(rows_iter) - - api_request.assert_called_once_with(method="GET", path=path, query_params={}) - - def test_page_size(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - - row_iterator = self._make_one( - _mock_client(), api_request, path, schema, page_size=4 - ) - row_iterator._get_next_page_response() - - api_request.assert_called_once_with( - method="GET", - path=path, - query_params={"maxResults": row_iterator._page_size}, - ) - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField( - "child", - "RECORD", - mode="REPEATED", - fields=[ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ], - ), - ] - rows = [ - { - "f": [ - {"v": "Bharney Rhubble"}, - {"v": "33"}, - { - "v": [ - {"v": {"f": [{"v": "Whamm-Whamm Rhubble"}, {"v": "3"}]}}, - {"v": {"f": [{"v": "Hoppy"}, {"v": "1"}]}}, - ] - }, - ] - }, - { - "f": [ - {"v": "Wylma Phlyntstone"}, - {"v": "29"}, - { - "v": [ - {"v": {"f": [{"v": "Bepples Phlyntstone"}, {"v": "0"}]}}, - {"v": {"f": [{"v": "Dino"}, {"v": "4"}]}}, - ] - }, - ] - }, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - tbl = row_iterator.to_arrow() - - 
self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 2) - - # Check the schema. - self.assertEqual(tbl.schema[0].name, "name") - self.assertTrue(pyarrow.types.is_string(tbl.schema[0].type)) - self.assertEqual(tbl.schema[1].name, "age") - self.assertTrue(pyarrow.types.is_int64(tbl.schema[1].type)) - child_field = tbl.schema[2] - self.assertEqual(child_field.name, "child") - self.assertTrue(pyarrow.types.is_list(child_field.type)) - self.assertTrue(pyarrow.types.is_struct(child_field.type.value_type)) - self.assertEqual(child_field.type.value_type[0].name, "name") - self.assertEqual(child_field.type.value_type[1].name, "age") - - # Check the data. - tbl_data = tbl.to_pydict() - names = tbl_data["name"] - ages = tbl_data["age"] - children = tbl_data["child"] - self.assertEqual(names, ["Bharney Rhubble", "Wylma Phlyntstone"]) - self.assertEqual(ages, [33, 29]) - self.assertEqual( - children, - [ - [ - {"name": "Whamm-Whamm Rhubble", "age": 3}, - {"name": "Hoppy", "age": 1}, - ], - [{"name": "Bepples Phlyntstone", "age": 0}, {"name": "Dino", "age": 4}], - ], - ) - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow_w_nulls(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] - rows = [ - {"f": [{"v": "Donkey"}, {"v": 32}]}, - {"f": [{"v": "Diddy"}, {"v": 29}]}, - {"f": [{"v": "Dixie"}, {"v": None}]}, - {"f": [{"v": None}, {"v": 111}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - tbl = row_iterator.to_arrow() - - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 4) - - # Check the schema. - self.assertEqual(tbl.schema[0].name, "name") - self.assertTrue(pyarrow.types.is_string(tbl.schema[0].type)) - self.assertEqual(tbl.schema[1].name, "age") - self.assertTrue(pyarrow.types.is_int64(tbl.schema[1].type)) - - # Check the data. - tbl_data = tbl.to_pydict() - names = tbl_data["name"] - ages = tbl_data["age"] - self.assertEqual(names, ["Donkey", "Diddy", "Dixie", None]) - self.assertEqual(ages, [32, 29, None, 111]) - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow_w_unknown_type(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField("sport", "UNKNOWN_TYPE", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}, {"v": "volleyball"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}, {"v": "basketball"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - tbl = row_iterator.to_arrow() - - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 2) - - # Check the schema. - self.assertEqual(tbl.schema[0].name, "name") - self.assertTrue(pyarrow.types.is_string(tbl.schema[0].type)) - self.assertEqual(tbl.schema[1].name, "age") - self.assertTrue(pyarrow.types.is_int64(tbl.schema[1].type)) - self.assertEqual(tbl.schema[2].name, "sport") - - # Check the data. 
- tbl_data = tbl.to_pydict() - names = tbl_data["name"] - ages = tbl_data["age"] - sports = tbl_data["sport"] - self.assertEqual(names, ["Bharney Rhubble", "Wylma Phlyntstone"]) - self.assertEqual(ages, [33, 29]) - self.assertEqual(sports, ["volleyball", "basketball"]) - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_arrow_w_empty_table(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - SchemaField( - "child", - "RECORD", - mode="REPEATED", - fields=[ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ], - ), - ] - rows = [] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - tbl = row_iterator.to_arrow() - - self.assertIsInstance(tbl, pyarrow.Table) - self.assertEqual(tbl.num_rows, 0) - - # Check the schema. - self.assertEqual(tbl.schema[0].name, "name") - self.assertTrue(pyarrow.types.is_string(tbl.schema[0].type)) - self.assertEqual(tbl.schema[1].name, "age") - self.assertTrue(pyarrow.types.is_int64(tbl.schema[1].type)) - child_field = tbl.schema[2] - self.assertEqual(child_field.name, "child") - self.assertTrue(pyarrow.types.is_list(child_field.type)) - self.assertTrue(pyarrow.types.is_struct(child_field.type.value_type)) - self.assertEqual(child_field.type.value_type[0].name, "name") - self.assertEqual(child_field.type.value_type[1].name, "age") - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_arrow_max_results_w_create_bqstorage_warning(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - mock_client = _mock_client() - - row_iterator = self._make_one( - client=mock_client, - api_request=api_request, - path=path, - schema=schema, - max_results=42, - ) - - with warnings.catch_warnings(record=True) as warned: - row_iterator.to_arrow(create_bqstorage_client=True) - - matches = [ - warning - for warning in warned - if warning.category is UserWarning - and "cannot use bqstorage_client" in str(warning).lower() - and "tabledata.list" in str(warning) - ] - self.assertEqual(len(matches), 1, msg="User warning was not emitted.") - mock_client._create_bqstorage_client.assert_not_called() - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_arrow_w_bqstorage(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - streams = [ - # Use two streams we want to check frames are read from each stream. 
- {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ] - session = bigquery_storage_v1beta1.types.ReadSession(streams=streams) - arrow_schema = pyarrow.schema( - [ - pyarrow.field("colA", pyarrow.int64()), - # Not alphabetical to test column order. - pyarrow.field("colC", pyarrow.float64()), - pyarrow.field("colB", pyarrow.string()), - ] - ) - session.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes() - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - expected_num_rows = 2 - expected_num_columns = 3 - page_items = [ - pyarrow.array([1, -1]), - pyarrow.array([2.0, 4.0]), - pyarrow.array(["abc", "def"]), - ] - - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_arrow.return_value = pyarrow.RecordBatch.from_arrays( - page_items, schema=arrow_schema - ) - mock_pages = (mock_page, mock_page, mock_page) - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - schema = [ - schema.SchemaField("colA", "INTEGER"), - schema.SchemaField("colC", "FLOAT"), - schema.SchemaField("colB", "STRING"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - actual_tbl = row_iterator.to_arrow(bqstorage_client=bqstorage_client) - - # Are the columns in the expected order? - self.assertEqual(actual_tbl.num_columns, expected_num_columns) - self.assertEqual(actual_tbl.schema[0].name, "colA") - self.assertEqual(actual_tbl.schema[1].name, "colC") - self.assertEqual(actual_tbl.schema[2].name, "colB") - - # Have expected number of rows? - total_pages = len(streams) * len(mock_pages) - total_rows = expected_num_rows * total_pages - self.assertEqual(actual_tbl.num_rows, total_rows) - - # Don't close the client if it was passed in. 
- bqstorage_client.transport.channel.close.assert_not_called() - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_arrow_w_bqstorage_creates_client(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - mock_client = _mock_client() - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - mock_client._create_bqstorage_client.return_value = bqstorage_client - session = bigquery_storage_v1beta1.types.ReadSession() - bqstorage_client.create_read_session.return_value = session - row_iterator = mut.RowIterator( - mock_client, - None, # api_request: ignored - None, # path: ignored - [ - schema.SchemaField("colA", "STRING"), - schema.SchemaField("colC", "STRING"), - schema.SchemaField("colB", "STRING"), - ], - table=mut.TableReference.from_string("proj.dset.tbl"), - ) - row_iterator.to_arrow(create_bqstorage_client=True) - mock_client._create_bqstorage_client.assert_called_once() - bqstorage_client.transport.channel.close.assert_called_once() - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_arrow_w_bqstorage_no_streams(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession() - arrow_schema = pyarrow.schema( - [ - pyarrow.field("colA", pyarrow.string()), - # Not alphabetical to test column order. 
- pyarrow.field("colC", pyarrow.string()), - pyarrow.field("colB", pyarrow.string()), - ] - ) - session.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes() - bqstorage_client.create_read_session.return_value = session - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - [ - schema.SchemaField("colA", "STRING"), - schema.SchemaField("colC", "STRING"), - schema.SchemaField("colB", "STRING"), - ], - table=mut.TableReference.from_string("proj.dset.tbl"), - ) - - actual_table = row_iterator.to_arrow(bqstorage_client=bqstorage_client) - self.assertEqual(actual_table.num_columns, 3) - self.assertEqual(actual_table.num_rows, 0) - self.assertEqual(actual_table.schema[0].name, "colA") - self.assertEqual(actual_table.schema[1].name, "colC") - self.assertEqual(actual_table.schema[2].name, "colB") - - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.tqdm_notebook") - @mock.patch("tqdm.tqdm") - def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - - progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), - ) - - for progress_bar_type, progress_bar_mock in progress_bars: - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - tbl = row_iterator.to_arrow(progress_bar_type=progress_bar_type) - - progress_bar_mock.assert_called() - progress_bar_mock().update.assert_called() - progress_bar_mock().close.assert_called_once() - self.assertEqual(tbl.num_rows, 4) - - @mock.patch("google.cloud.bigquery.table.pyarrow", new=None) - def test_to_arrow_w_pyarrow_none(self): - schema = [] - rows = [] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with self.assertRaises(ValueError): - row_iterator.to_arrow() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_iterable(self): - from google.cloud.bigquery.schema import SchemaField - import types - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - - path = "/foo" - api_request = mock.Mock( - side_effect=[ - { - "rows": [{"f": [{"v": "Bengt"}, {"v": "32"}]}], - "pageToken": "NEXTPAGE", - }, - {"rows": [{"f": [{"v": "Sven"}, {"v": "33"}]}]}, - ] - ) - - row_iterator = self._make_one( - _mock_client(), api_request, path, schema, page_size=1, max_results=5 - ) - dfs = row_iterator.to_dataframe_iterable() - - self.assertIsInstance(dfs, types.GeneratorType) - - df_1 = next(dfs) - self.assertIsInstance(df_1, pandas.DataFrame) - self.assertEqual(df_1.name.dtype.name, "object") - self.assertEqual(df_1.age.dtype.name, "int64") - self.assertEqual(len(df_1), 1) # verify the number of rows - self.assertEqual( - df_1["name"][0], "Bengt" - ) # verify the first value of 'name' column - self.assertEqual(df_1["age"][0], 32) # verify the first 
value of 'age' column - - df_2 = next(dfs) - self.assertEqual(len(df_2), 1) # verify the number of rows - self.assertEqual(df_2["name"][0], "Sven") - self.assertEqual(df_2["age"][0], 33) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_iterable_w_bqstorage(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - arrow_fields = [ - pyarrow.field("colA", pyarrow.int64()), - # Not alphabetical to test column order. - pyarrow.field("colC", pyarrow.float64()), - pyarrow.field("colB", pyarrow.utf8()), - ] - arrow_schema = pyarrow.schema(arrow_fields) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - streams = [ - # Use two streams we want to check frames are read from each stream. - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ] - session = bigquery_storage_v1beta1.types.ReadSession( - streams=streams, - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - page_dataframe = pandas.DataFrame( - {"colA": [1, -1], "colC": [2.0, 4.0], "colB": ["abc", "def"]}, - ) - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_dataframe.return_value = page_dataframe - mock_pages = (mock_page, mock_page, mock_page) - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - schema = [ - schema.SchemaField("colA", "IGNORED"), - schema.SchemaField("colC", "IGNORED"), - schema.SchemaField("colB", "IGNORED"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - got = list( - row_iterator.to_dataframe_iterable(bqstorage_client=bqstorage_client) - ) - - # Have expected number of rows? - total_pages = len(streams) * len(mock_pages) - self.assertEqual(len(got), total_pages) - - # Don't close the client if it was passed in. 
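As for what to_dataframe_iterable buys the caller: every result page arrives as its own DataFrame, so rows can be processed incrementally instead of materializing the whole result set at once. A rough pandas-only sketch of that consumption pattern (page payloads invented):

    import pandas

    def pages_to_dataframes(pages):
        # One DataFrame per page; nothing is concatenated up front.
        for page in pages:
            yield pandas.DataFrame(page)

    pages = [{"colA": [1, -1]}, {"colA": [2, 3]}]
    total = sum(len(df) for df in pages_to_dataframes(pages))
    assert total == 4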
- bqstorage_client.transport.channel.close.assert_not_called() - - @mock.patch("google.cloud.bigquery.table.pandas", new=None) - def test_to_dataframe_iterable_error_if_pandas_is_none(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with pytest.raises(ValueError, match="pandas"): - row_iterator.to_dataframe_iterable() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - df = row_iterator.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 4) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - self.assertEqual(df.name.dtype.name, "object") - self.assertEqual(df.age.dtype.name, "int64") - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.tqdm_notebook") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_progress_bar( - self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock - ): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - - progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), - ) - - for progress_bar_type, progress_bar_mock in progress_bars: - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type) - - progress_bar_mock.assert_called() - progress_bar_mock().update.assert_called() - progress_bar_mock().close.assert_called_once() - self.assertEqual(len(df), 4) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.tqdm_notebook") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_progress_bar_wo_pyarrow( - self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock - ): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, 
{"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - - progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), - ) - - for progress_bar_type, progress_bar_mock in progress_bars: - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - with mock.patch("google.cloud.bigquery.table.pyarrow", None): - df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type) - - progress_bar_mock.assert_called() - progress_bar_mock().update.assert_called() - progress_bar_mock().close.assert_called_once() - self.assertEqual(len(df), 4) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @mock.patch("google.cloud.bigquery.table.tqdm", new=None) - def test_to_dataframe_no_tqdm_no_progress_bar(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with warnings.catch_warnings(record=True) as warned: - df = row_iterator.to_dataframe() - - self.assertEqual(len(warned), 0) - self.assertEqual(len(df), 4) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @mock.patch("google.cloud.bigquery.table.tqdm", new=None) - def test_to_dataframe_no_tqdm(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with warnings.catch_warnings(record=True) as warned: - df = row_iterator.to_dataframe(progress_bar_type="tqdm") - - self.assertEqual(len(warned), 1) - for warning in warned: - self.assertIs(warning.category, UserWarning) - - # Even though the progress bar won't show, downloading the dataframe - # should still work. 
- self.assertEqual(len(df), 4) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui", new=None) # will raise TypeError on call - @mock.patch("tqdm.tqdm_notebook", new=None) # will raise TypeError on call - @mock.patch("tqdm.tqdm", new=None) # will raise TypeError on call - def test_to_dataframe_tqdm_error(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - - for progress_bar_type in ("tqdm", "tqdm_notebook", "tqdm_gui"): - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with warnings.catch_warnings(record=True) as warned: - df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type) - - self.assertEqual(len(df), 4) # all should be well - - # Warn that a progress bar was requested, but creating the tqdm - # progress bar failed. - for warning in warned: - self.assertIs(warning.category, UserWarning) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_w_empty_results(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = self._make_one(_mock_client(), api_request, schema=schema) - - df = row_iterator.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 0) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_w_empty_results_wo_pyarrow(self): - from google.cloud.bigquery.schema import SchemaField - - with mock.patch("google.cloud.bigquery.table.pyarrow", None): - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = self._make_one(_mock_client(), api_request, schema=schema) - - df = row_iterator.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 0) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_w_no_results_wo_pyarrow(self): - from google.cloud.bigquery.schema import SchemaField - - with mock.patch("google.cloud.bigquery.table.pyarrow", None): - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = self._make_one(_mock_client(), api_request, schema=schema) - - def empty_iterable(dtypes=None): - return [] - - row_iterator.to_dataframe_iterable = empty_iterable - - df = row_iterator.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 0) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def 
test_to_dataframe_logs_tabledata_list(self): - from google.cloud.bigquery.table import Table - - mock_logger = mock.create_autospec(logging.Logger) - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = self._make_one( - _mock_client(), api_request, table=Table("debug-proj.debug_dset.debug_tbl") - ) - - with mock.patch("google.cloud.bigquery.table._LOGGER", mock_logger): - row_iterator.to_dataframe() - - mock_logger.debug.assert_any_call( - "Started reading table 'debug-proj.debug_dset.debug_tbl' with tabledata.list." - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_w_various_types_nullable(self): - import datetime - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("start_timestamp", "TIMESTAMP"), - SchemaField("seconds", "INT64"), - SchemaField("miles", "FLOAT64"), - SchemaField("payment_type", "STRING"), - SchemaField("complete", "BOOL"), - SchemaField("date", "DATE"), - ] - row_data = [ - [None, None, None, None, None, None], - ["1.4338368E9", "420", "1.1", u"Cash", "true", "1999-12-01"], - ["1.3878117E9", "2580", "17.7", u"Cash", "false", "1953-06-14"], - ["1.3855653E9", "2280", "4.4", u"Credit", "true", "1981-11-04"], - ] - rows = [{"f": [{"v": field} for field in row]} for row in row_data] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - df = row_iterator.to_dataframe() - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 4) # verify the number of rows - exp_columns = [field.name for field in schema] - self.assertEqual(list(df), exp_columns) # verify the column names - - for index, row in df.iterrows(): - if index == 0: - self.assertTrue(row.isnull().all()) - else: - self.assertIsInstance(row.start_timestamp, pandas.Timestamp) - self.assertIsInstance(row.seconds, float) - self.assertIsInstance(row.payment_type, six.string_types) - self.assertIsInstance(row.complete, bool) - self.assertIsInstance(row.date, datetime.date) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_column_dtypes(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("start_timestamp", "TIMESTAMP"), - SchemaField("seconds", "INT64"), - SchemaField("miles", "FLOAT64"), - SchemaField("km", "FLOAT64"), - SchemaField("payment_type", "STRING"), - SchemaField("complete", "BOOL"), - SchemaField("date", "DATE"), - ] - row_data = [ - ["1.4338368E9", "420", "1.1", "1.77", u"Cash", "true", "1999-12-01"], - ["1.3878117E9", "2580", "17.7", "28.5", u"Cash", "false", "1953-06-14"], - ["1.3855653E9", "2280", "4.4", "7.1", u"Credit", "true", "1981-11-04"], - ] - rows = [{"f": [{"v": field} for field in row]} for row in row_data] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - df = row_iterator.to_dataframe(dtypes={"km": "float16"}) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 3) # verify the number of rows - exp_columns = [field.name for field in schema] - self.assertEqual(list(df), exp_columns) # verify the column names - - self.assertEqual(df.start_timestamp.dtype.name, "datetime64[ns, UTC]") - self.assertEqual(df.seconds.dtype.name, "int64") - self.assertEqual(df.miles.dtype.name, "float64") - self.assertEqual(df.km.dtype.name, "float16") - self.assertEqual(df.payment_type.dtype.name, "object") - 
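The dtypes={"km": "float16"} argument exercised above is the per-column override hook: listed columns are narrowed to the requested dtype, while every other column keeps the default mapping these assertions enumerate. In bare pandas terms, roughly:

    import pandas

    df = pandas.DataFrame({"km": [1.77, 28.5, 7.1]})
    # Float columns default to float64; the override narrows just "km".
    df = df.astype({"km": "float16"})
    assert df.km.dtype.name == "float16"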
self.assertEqual(df.complete.dtype.name, "bool") - self.assertEqual(df.date.dtype.name, "object") - - @mock.patch("google.cloud.bigquery.table.pandas", new=None) - def test_to_dataframe_error_if_pandas_is_none(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with self.assertRaises(ValueError): - row_iterator.to_dataframe() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_max_results_w_bqstorage_warning(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - bqstorage_client = mock.Mock() - - row_iterator = self._make_one( - client=_mock_client(), - api_request=api_request, - path=path, - schema=schema, - max_results=42, - ) - - with warnings.catch_warnings(record=True) as warned: - row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - matches = [ - warning - for warning in warned - if warning.category is UserWarning - and "cannot use bqstorage_client" in str(warning).lower() - and "tabledata.list" in str(warning) - ] - self.assertEqual(len(matches), 1, msg="User warning was not emitted.") - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_max_results_w_create_bqstorage_warning(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("name", "STRING", mode="REQUIRED"), - SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - mock_client = _mock_client() - - row_iterator = self._make_one( - client=mock_client, - api_request=api_request, - path=path, - schema=schema, - max_results=42, - ) - - with warnings.catch_warnings(record=True) as warned: - row_iterator.to_dataframe(create_bqstorage_client=True) - - matches = [ - warning - for warning in warned - if warning.category is UserWarning - and "cannot use bqstorage_client" in str(warning).lower() - and "tabledata.list" in str(warning) - ] - self.assertEqual(len(matches), 1, msg="User warning was not emitted.") - mock_client._create_bqstorage_client.assert_not_called() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_creates_client(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - mock_client = _mock_client() - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - mock_client._create_bqstorage_client.return_value = bqstorage_client - session = bigquery_storage_v1beta1.types.ReadSession() - 
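A note on the mocking style in these storage-API tests: mock.create_autospec builds stand-ins that enforce the real class's attribute set and call signatures, so a typo'd method name fails loudly instead of silently recording a call. Sketch with a throwaway class:

    from unittest import mock

    class Transport:
        def close(self):
            pass

    transport = mock.create_autospec(Transport)
    transport.close()
    transport.close.assert_called_once()
    # transport.shutdown() would raise AttributeError: autospec mirrors the spec.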
bqstorage_client.create_read_session.return_value = session - row_iterator = mut.RowIterator( - mock_client, - None, # api_request: ignored - None, # path: ignored - [ - schema.SchemaField("colA", "STRING"), - schema.SchemaField("colC", "STRING"), - schema.SchemaField("colB", "STRING"), - ], - table=mut.TableReference.from_string("proj.dset.tbl"), - ) - row_iterator.to_dataframe(create_bqstorage_client=True) - mock_client._create_bqstorage_client.assert_called_once() - bqstorage_client.transport.channel.close.assert_called_once() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_no_streams(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession() - bqstorage_client.create_read_session.return_value = session - - row_iterator = mut.RowIterator( - _mock_client(), - api_request=None, - path=None, - schema=[ - schema.SchemaField("colA", "INTEGER"), - schema.SchemaField("colC", "FLOAT"), - schema.SchemaField("colB", "STRING"), - ], - table=mut.TableReference.from_string("proj.dset.tbl"), - ) - - got = row_iterator.to_dataframe(bqstorage_client) - column_names = ["colA", "colC", "colB"] - self.assertEqual(list(got), column_names) - self.assertTrue(got.empty) - - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_w_bqstorage_logs_session(self): - from google.cloud.bigquery.table import Table - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession() - session.name = "projects/test-proj/locations/us/sessions/SOMESESSION" - bqstorage_client.create_read_session.return_value = session - mock_logger = mock.create_autospec(logging.Logger) - row_iterator = self._make_one( - _mock_client(), table=Table("debug-proj.debug_dset.debug_tbl") - ) - - with mock.patch("google.cloud.bigquery._pandas_helpers._LOGGER", mock_logger): - row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - mock_logger.debug.assert_any_call( - "Started reading table 'debug-proj.debug_dset.debug_tbl' " - "with BQ Storage API session 'projects/test-proj/locations/us/sessions/SOMESESSION'." - ) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_w_bqstorage_empty_streams(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - arrow_fields = [ - pyarrow.field("colA", pyarrow.int64()), - # Not alphabetical to test column order. 
- pyarrow.field("colC", pyarrow.float64()), - pyarrow.field("colB", pyarrow.utf8()), - ] - arrow_schema = pyarrow.schema(arrow_fields) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession( - streams=[{"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}], - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - mock_pages = mock.PropertyMock(return_value=()) - type(mock_rows).pages = mock_pages - - # Schema is required when there are no record batches in the stream. - schema = [ - schema.SchemaField("colA", "INTEGER"), - schema.SchemaField("colC", "FLOAT"), - schema.SchemaField("colB", "STRING"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - got = row_iterator.to_dataframe(bqstorage_client) - - column_names = ["colA", "colC", "colB"] - self.assertEqual(list(got), column_names) - self.assertTrue(got.empty) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_w_bqstorage_nonempty(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - arrow_fields = [ - pyarrow.field("colA", pyarrow.int64()), - # Not alphabetical to test column order. - pyarrow.field("colC", pyarrow.float64()), - pyarrow.field("colB", pyarrow.utf8()), - ] - arrow_schema = pyarrow.schema(arrow_fields) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - streams = [ - # Use two streams we want to check frames are read from each stream. 
- {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ] - session = bigquery_storage_v1beta1.types.ReadSession( - streams=streams, - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - page_items = [ - pyarrow.array([1, -1]), - pyarrow.array([2.0, 4.0]), - pyarrow.array(["abc", "def"]), - ] - page_record_batch = pyarrow.RecordBatch.from_arrays( - page_items, schema=arrow_schema - ) - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_arrow.return_value = page_record_batch - mock_pages = (mock_page, mock_page, mock_page) - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - schema = [ - schema.SchemaField("colA", "IGNORED"), - schema.SchemaField("colC", "IGNORED"), - schema.SchemaField("colB", "IGNORED"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - got = row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - # Are the columns in the expected order? - column_names = ["colA", "colC", "colB"] - self.assertEqual(list(got), column_names) - - # Have expected number of rows? - total_pages = len(streams) * len(mock_pages) - total_rows = len(page_items[0]) * total_pages - self.assertEqual(len(got.index), total_rows) - - # Don't close the client if it was passed in. 
- bqstorage_client.transport.channel.close.assert_not_called() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - arrow_fields = [pyarrow.field("colA", pyarrow.int64())] - arrow_schema = pyarrow.schema(arrow_fields) - - streams = [ - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ] - session = bigquery_storage_v1beta1.types.ReadSession( - streams=streams, - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - - page_items = [ - pyarrow.array([1, -1]), - ] - page_record_batch = pyarrow.RecordBatch.from_arrays( - page_items, schema=arrow_schema - ) - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_arrow.return_value = page_record_batch - mock_pages = (mock_page, mock_page, mock_page) - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - row_iterator = self._make_one( - schema=[schema.SchemaField("colA", "IGNORED")], - table=mut.TableReference.from_string("proj.dset.tbl"), - ) - got = row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - self.assertEqual(list(got), ["colA"]) - total_pages = len(streams) * len(mock_pages) - total_rows = len(page_items[0]) * total_pages - self.assertEqual(len(got.index), total_rows) - self.assertTrue(got.index.is_unique) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_w_bqstorage_updates_progress_bar(self, tqdm_mock): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - # Speed up testing. - mut._PROGRESS_INTERVAL = 0.01 - - arrow_fields = [pyarrow.field("testcol", pyarrow.int64())] - arrow_schema = pyarrow.schema(arrow_fields) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - streams = [ - # Use two streams we want to check that progress bar updates are - # sent from each stream. 
- {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ] - session = bigquery_storage_v1beta1.types.ReadSession( - streams=streams, - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - mock_page = mock.create_autospec(reader.ReadRowsPage) - page_items = [-1, 0, 1] - type(mock_page).num_items = mock.PropertyMock(return_value=len(page_items)) - - def blocking_to_arrow(*args, **kwargs): - # Sleep for longer than the waiting interval so that we know we're - # only reading one page per loop at most. - time.sleep(2 * mut._PROGRESS_INTERVAL) - return pyarrow.RecordBatch.from_arrays( - [pyarrow.array(page_items)], schema=arrow_schema - ) - - mock_page.to_arrow.side_effect = blocking_to_arrow - mock_pages = (mock_page, mock_page, mock_page, mock_page, mock_page) - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - schema = [schema.SchemaField("testcol", "IGNORED")] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - row_iterator.to_dataframe( - bqstorage_client=bqstorage_client, progress_bar_type="tqdm" - ) - - # Make sure that this test updated the progress bar once per page from - # each stream. - total_pages = len(streams) * len(mock_pages) - expected_total_rows = total_pages * len(page_items) - progress_updates = [ - args[0] for args, kwargs in tqdm_mock().update.call_args_list - ] - # Should have sent >1 update due to delay in blocking_to_arrow. - self.assertGreater(len(progress_updates), 1) - self.assertEqual(sum(progress_updates), expected_total_rows) - tqdm_mock().close.assert_called_once() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_w_bqstorage_exits_on_keyboardinterrupt(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - # Speed up testing. - mut._PROGRESS_INTERVAL = 0.01 - - arrow_fields = [ - pyarrow.field("colA", pyarrow.int64()), - # Not alphabetical to test column order. - pyarrow.field("colC", pyarrow.float64()), - pyarrow.field("colB", pyarrow.utf8()), - ] - arrow_schema = pyarrow.schema(arrow_fields) - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - session = bigquery_storage_v1beta1.types.ReadSession( - streams=[ - # Use two streams because one will fail with a - # KeyboardInterrupt, and we want to check that the other stream - # ends early. 
- {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}, - {"name": "/projects/proj/dataset/dset/tables/tbl/streams/5678"}, - ], - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - page_items = [ - pyarrow.array([1, -1]), - pyarrow.array([2.0, 4.0]), - pyarrow.array(["abc", "def"]), - ] - - def blocking_to_arrow(*args, **kwargs): - # Sleep for longer than the waiting interval so that we know we're - # only reading one page per loop at most. - time.sleep(2 * mut._PROGRESS_INTERVAL) - return pyarrow.RecordBatch.from_arrays(page_items, schema=arrow_schema) - - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_arrow.side_effect = blocking_to_arrow - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_pages = mock.PropertyMock(return_value=(mock_page, mock_page, mock_page)) - type(mock_rows).pages = mock_pages - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - mock_rowstream.rows.return_value = mock_rows - - mock_cancelled_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_cancelled_pages = mock.PropertyMock(side_effect=KeyboardInterrupt) - type(mock_cancelled_rows).pages = mock_cancelled_pages - mock_cancelled_rowstream = mock.create_autospec(reader.ReadRowsStream) - mock_cancelled_rowstream.rows.return_value = mock_cancelled_rows - - bqstorage_client.read_rows.side_effect = ( - mock_cancelled_rowstream, - mock_rowstream, - ) - - schema = [ - schema.SchemaField("colA", "IGNORED"), - schema.SchemaField("colB", "IGNORED"), - schema.SchemaField("colC", "IGNORED"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - with pytest.raises(KeyboardInterrupt): - row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - # Should not have fetched the third page of results because exit_early - # should have been set. 
- self.assertLessEqual(mock_page.to_dataframe.call_count, 2) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_fallback_to_tabledata_list(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.create_read_session.side_effect = google.api_core.exceptions.InternalServerError( - "can't read with bqstorage_client" - ) - iterator_schema = [ - schema.SchemaField("name", "STRING", mode="REQUIRED"), - schema.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - rows = [ - {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, - {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, - {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, - {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, - ] - path = "/foo" - api_request = mock.Mock(return_value={"rows": rows}) - row_iterator = mut.RowIterator( - _mock_client(), - api_request, - path, - iterator_schema, - table=mut.Table("proj.dset.tbl"), - ) - - df = row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 4) # verify the number of rows - self.assertEqual(list(df), ["name", "age"]) # verify the column names - self.assertEqual(df.name.dtype.name, "object") - self.assertEqual(df.age.dtype.name, "int64") - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_tabledata_list_w_multiple_pages_return_unique_index(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - iterator_schema = [schema.SchemaField("name", "STRING", mode="REQUIRED")] - path = "/foo" - api_request = mock.Mock( - side_effect=[ - {"rows": [{"f": [{"v": "Bengt"}]}], "pageToken": "NEXTPAGE"}, - {"rows": [{"f": [{"v": "Sven"}]}]}, - ] - ) - row_iterator = mut.RowIterator( - _mock_client(), - api_request, - path, - iterator_schema, - table=mut.Table("proj.dset.tbl"), - ) - - df = row_iterator.to_dataframe(bqstorage_client=None) - - self.assertIsInstance(df, pandas.DataFrame) - self.assertEqual(len(df), 2) - self.assertEqual(list(df), ["name"]) - self.assertEqual(df.name.dtype.name, "object") - self.assertTrue(df.index.is_unique) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_raises_auth_error(self): - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.create_read_session.side_effect = google.api_core.exceptions.Forbidden( - "TEST BigQuery Storage API not enabled. 
TEST" - ) - path = "/foo" - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = mut.RowIterator( - _mock_client(), api_request, path, [], table=mut.Table("proj.dset.tbl") - ) - - with pytest.raises(google.api_core.exceptions.Forbidden): - row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_raises_import_error(self): - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - path = "/foo" - api_request = mock.Mock(return_value={"rows": []}) - row_iterator = mut.RowIterator( - _mock_client(), api_request, path, [], table=mut.Table("proj.dset.tbl") - ) - - with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises( - ValueError - ) as exc_context: - row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - assert mut._NO_BQSTORAGE_ERROR in str(exc_context.value) - - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_partition(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - [schema.SchemaField("colA", "IGNORED")], - table=mut.TableReference.from_string("proj.dset.tbl$20181225"), - ) - - with pytest.raises(ValueError): - row_iterator.to_dataframe(bqstorage_client) - - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - def test_to_dataframe_w_bqstorage_snapshot(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - [schema.SchemaField("colA", "IGNORED")], - table=mut.TableReference.from_string("proj.dset.tbl@1234567890000"), - ) - - with pytest.raises(ValueError): - row_iterator.to_dataframe(bqstorage_client) - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self): - from google.cloud.bigquery import schema - from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1beta1 import reader - - arrow_fields = [ - # Not alphabetical to test column order. - pyarrow.field("col_str", pyarrow.utf8()), - # The backend returns strings, and without other info, pyarrow contains - # string data in categorical columns, too (and not maybe the Dictionary - # type that corresponds to pandas.Categorical). 
- pyarrow.field("col_category", pyarrow.utf8()), - ] - arrow_schema = pyarrow.schema(arrow_fields) - - # create a mock BQ storage client - bqstorage_client = mock.create_autospec( - bigquery_storage_v1beta1.BigQueryStorageClient - ) - bqstorage_client.transport = mock.create_autospec( - big_query_storage_grpc_transport.BigQueryStorageGrpcTransport - ) - session = bigquery_storage_v1beta1.types.ReadSession( - streams=[{"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"}], - arrow_schema={"serialized_schema": arrow_schema.serialize().to_pybytes()}, - ) - bqstorage_client.create_read_session.return_value = session - - mock_rowstream = mock.create_autospec(reader.ReadRowsStream) - bqstorage_client.read_rows.return_value = mock_rowstream - - # prepare the iterator over mocked rows - mock_rows = mock.create_autospec(reader.ReadRowsIterable) - mock_rowstream.rows.return_value = mock_rows - page_items = [ - [ - pyarrow.array(["foo", "bar", "baz"]), # col_str - pyarrow.array(["low", "medium", "low"]), # col_category - ], - [ - pyarrow.array(["foo_page2", "bar_page2", "baz_page2"]), # col_str - pyarrow.array(["medium", "high", "low"]), # col_category - ], - ] - - mock_pages = [] - - for record_list in page_items: - page_record_batch = pyarrow.RecordBatch.from_arrays( - record_list, schema=arrow_schema - ) - mock_page = mock.create_autospec(reader.ReadRowsPage) - mock_page.to_arrow.return_value = page_record_batch - mock_pages.append(mock_page) - - type(mock_rows).pages = mock.PropertyMock(return_value=mock_pages) - - schema = [ - schema.SchemaField("col_str", "IGNORED"), - schema.SchemaField("col_category", "IGNORED"), - ] - - row_iterator = mut.RowIterator( - _mock_client(), - None, # api_request: ignored - None, # path: ignored - schema, - table=mut.TableReference.from_string("proj.dset.tbl"), - selected_fields=schema, - ) - - # run the method under test - got = row_iterator.to_dataframe( - bqstorage_client=bqstorage_client, - dtypes={ - "col_category": pandas.core.dtypes.dtypes.CategoricalDtype( - categories=["low", "medium", "high"], ordered=False, - ), - }, - ) - - # Are the columns in the expected order? - column_names = ["col_str", "col_category"] - self.assertEqual(list(got), column_names) - - # Have expected number of rows? - total_pages = len(mock_pages) # we have a single stream, thus these two equal - total_rows = len(page_items[0][0]) * total_pages - self.assertEqual(len(got.index), total_rows) - - # Are column types correct? - expected_dtypes = [ - pandas.core.dtypes.dtypes.np.dtype("O"), # the default for string data - pandas.core.dtypes.dtypes.CategoricalDtype( - categories=["low", "medium", "high"], ordered=False, - ), - ] - self.assertEqual(list(got.dtypes), expected_dtypes) - - # And the data in the categorical column? - self.assertEqual( - list(got["col_category"]), - ["low", "medium", "low", "medium", "high", "low"], - ) - - # Don't close the client if it was passed in. 
- bqstorage_client.transport.channel.close.assert_not_called() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - def test_to_dataframe_concat_categorical_dtype_wo_pyarrow(self): - from google.cloud.bigquery.schema import SchemaField - - schema = [ - SchemaField("col_str", "STRING"), - SchemaField("col_category", "STRING"), - ] - row_data = [ - [u"foo", u"low"], - [u"bar", u"medium"], - [u"baz", u"low"], - [u"foo_page2", u"medium"], - [u"bar_page2", u"high"], - [u"baz_page2", u"low"], - ] - path = "/foo" - - rows = [{"f": [{"v": field} for field in row]} for row in row_data[:3]] - rows_page2 = [{"f": [{"v": field} for field in row]} for row in row_data[3:]] - api_request = mock.Mock( - side_effect=[{"rows": rows, "pageToken": "NEXTPAGE"}, {"rows": rows_page2}] - ) - - row_iterator = self._make_one(_mock_client(), api_request, path, schema) - - with mock.patch("google.cloud.bigquery.table.pyarrow", None): - got = row_iterator.to_dataframe( - dtypes={ - "col_category": pandas.core.dtypes.dtypes.CategoricalDtype( - categories=["low", "medium", "high"], ordered=False, - ), - }, - ) - - self.assertIsInstance(got, pandas.DataFrame) - self.assertEqual(len(got), 6) # verify the number of rows - expected_columns = [field.name for field in schema] - self.assertEqual(list(got), expected_columns) # verify the column names - - # Are column types correct? - expected_dtypes = [ - pandas.core.dtypes.dtypes.np.dtype("O"), # the default for string data - pandas.core.dtypes.dtypes.CategoricalDtype( - categories=["low", "medium", "high"], ordered=False, - ), - ] - self.assertEqual(list(got.dtypes), expected_dtypes) - - # And the data in the categorical column? - self.assertEqual( - list(got["col_category"]), - ["low", "medium", "low", "medium", "high", "low"], - ) - - -class TestPartitionRange(unittest.TestCase): - def _get_target_class(self): - from google.cloud.bigquery.table import PartitionRange - - return PartitionRange - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_defaults(self): - object_under_test = self._make_one() - assert object_under_test.start is None - assert object_under_test.end is None - assert object_under_test.interval is None - - def test_constructor_w_properties(self): - object_under_test = self._make_one(start=1, end=10, interval=2) - assert object_under_test.start == 1 - assert object_under_test.end == 10 - assert object_under_test.interval == 2 - - def test_constructor_w_resource(self): - object_under_test = self._make_one( - _properties={"start": -1234567890, "end": 1234567890, "interval": 1000000} - ) - assert object_under_test.start == -1234567890 - assert object_under_test.end == 1234567890 - assert object_under_test.interval == 1000000 - - def test_repr(self): - object_under_test = self._make_one(start=1, end=10, interval=2) - assert repr(object_under_test) == "PartitionRange(end=10, interval=2, start=1)" - - -class TestRangePartitioning(unittest.TestCase): - def _get_target_class(self): - from google.cloud.bigquery.table import RangePartitioning - - return RangePartitioning - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_defaults(self): - object_under_test = self._make_one() - assert object_under_test.field is None - assert object_under_test.range_.start is None - assert object_under_test.range_.end is None - assert object_under_test.range_.interval is None - - def test_constructor_w_properties(self): - from google.cloud.bigquery.table import 
PartitionRange - - object_under_test = self._make_one( - range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" - ) - assert object_under_test.field == "integer_col" - assert object_under_test.range_.start == 1 - assert object_under_test.range_.end == 10 - assert object_under_test.range_.interval == 2 - - def test_constructor_w_resource(self): - object_under_test = self._make_one( - _properties={ - "field": "some_column", - "range": {"start": -1234567890, "end": 1234567890, "interval": 1000000}, - } - ) - assert object_under_test.field == "some_column" - assert object_under_test.range_.start == -1234567890 - assert object_under_test.range_.end == 1234567890 - assert object_under_test.range_.interval == 1000000 - - def test_range_w_wrong_type(self): - object_under_test = self._make_one() - with pytest.raises(ValueError, match="PartitionRange"): - object_under_test.range_ = object() - - def test_repr(self): - from google.cloud.bigquery.table import PartitionRange - - object_under_test = self._make_one( - range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" - ) - assert ( - repr(object_under_test) - == "RangePartitioning(field='integer_col', range_=PartitionRange(end=10, interval=2, start=1))" - ) - - -class TestTimePartitioning(unittest.TestCase): - def _get_target_class(self): - from google.cloud.bigquery.table import TimePartitioning - - return TimePartitioning - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor_defaults(self): - time_partitioning = self._make_one() - self.assertEqual(time_partitioning.type_, "DAY") - self.assertIsNone(time_partitioning.field) - self.assertIsNone(time_partitioning.expiration_ms) - - def test_constructor_explicit(self): - from google.cloud.bigquery.table import TimePartitioningType - - time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 - ) - - self.assertEqual(time_partitioning.type_, "DAY") - self.assertEqual(time_partitioning.field, "name") - self.assertEqual(time_partitioning.expiration_ms, 10000) - - def test_require_partition_filter_warns_deprecation(self): - object_under_test = self._make_one() - - with warnings.catch_warnings(record=True) as warned: - assert object_under_test.require_partition_filter is None - object_under_test.require_partition_filter = True - assert object_under_test.require_partition_filter - - assert len(warned) == 3 - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - def test_from_api_repr_empty(self): - klass = self._get_target_class() - - # Even though there are required properties according to the API - # specification, sometimes time partitioning is populated as an empty - # object. See internal bug 131167013. 
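For reference, the ``PartitionRange`` and ``RangePartitioning`` surface pinned down by the tests above is typically consumed when declaring an integer-range-partitioned table, roughly as in this sketch (table and column names are illustrative; it assumes a ``google-cloud-bigquery`` release that exports both classes, as the deleted ``__init__.py`` did):

.. code-block:: python

    from google.cloud import bigquery

    table = bigquery.Table(
        "proj.dset.partitioned_tbl",  # hypothetical table ID
        schema=[bigquery.SchemaField("integer_col", "INTEGER")],
    )
    # Bucket rows by integer_col into ranges [0, 100), [100, 200), ...
    table.range_partitioning = bigquery.RangePartitioning(
        field="integer_col",
        range_=bigquery.PartitionRange(start=0, end=1000, interval=100),
    )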
- api_repr = {} - time_partitioning = klass.from_api_repr(api_repr) - - self.assertIsNone(time_partitioning.type_) - self.assertIsNone(time_partitioning.field) - self.assertIsNone(time_partitioning.expiration_ms) - - def test_from_api_repr_minimal(self): - from google.cloud.bigquery.table import TimePartitioningType - - klass = self._get_target_class() - api_repr = {"type": "DAY"} - time_partitioning = klass.from_api_repr(api_repr) - - self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) - self.assertIsNone(time_partitioning.field) - self.assertIsNone(time_partitioning.expiration_ms) - - def test_from_api_repr_doesnt_override_type(self): - klass = self._get_target_class() - api_repr = {"type": "HOUR"} - time_partitioning = klass.from_api_repr(api_repr) - self.assertEqual(time_partitioning.type_, "HOUR") - - def test_from_api_repr_explicit(self): - from google.cloud.bigquery.table import TimePartitioningType - - klass = self._get_target_class() - api_repr = { - "type": "DAY", - "field": "name", - "expirationMs": "10000", - "requirePartitionFilter": True, - } - time_partitioning = klass.from_api_repr(api_repr) - - self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) - self.assertEqual(time_partitioning.field, "name") - self.assertEqual(time_partitioning.expiration_ms, 10000) - - with warnings.catch_warnings(record=True) as warned: - self.assertTrue(time_partitioning.require_partition_filter) - - self.assertIs(warned[0].category, PendingDeprecationWarning) - - def test_to_api_repr_defaults(self): - time_partitioning = self._make_one() - expected = {"type": "DAY"} - self.assertEqual(time_partitioning.to_api_repr(), expected) - - def test_to_api_repr_explicit(self): - from google.cloud.bigquery.table import TimePartitioningType - - time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 - ) - - with warnings.catch_warnings(record=True) as warned: - time_partitioning.require_partition_filter = True - - self.assertIs(warned[0].category, PendingDeprecationWarning) - - expected = { - "type": "DAY", - "field": "name", - "expirationMs": "10000", - "requirePartitionFilter": True, - } - self.assertEqual(time_partitioning.to_api_repr(), expected) - - def test___eq___wrong_type(self): - time_partitioning = self._make_one() - other = object() - self.assertNotEqual(time_partitioning, other) - self.assertEqual(time_partitioning, mock.ANY) - - def test___eq___type__mismatch(self): - time_partitioning = self._make_one() - other = self._make_one(type_="HOUR") - self.assertNotEqual(time_partitioning, other) - - def test___eq___field_mismatch(self): - time_partitioning = self._make_one(field="foo") - other = self._make_one(field="bar") - self.assertNotEqual(time_partitioning, other) - - def test___eq___expiration_ms_mismatch(self): - time_partitioning = self._make_one(field="foo", expiration_ms=100000) - other = self._make_one(field="foo", expiration_ms=200000) - self.assertNotEqual(time_partitioning, other) - - def test___eq___require_partition_filter_mismatch(self): - time_partitioning = self._make_one(field="foo", expiration_ms=100000) - other = self._make_one(field="foo", expiration_ms=100000) - with warnings.catch_warnings(record=True) as warned: - time_partitioning.require_partition_filter = True - other.require_partition_filter = False - - assert len(warned) == 2 - for warning in warned: - self.assertIs(warning.category, PendingDeprecationWarning) - - self.assertNotEqual(time_partitioning, other) - - def test___eq___hit(self): - 
time_partitioning = self._make_one(field="foo", expiration_ms=100000) - other = self._make_one(field="foo", expiration_ms=100000) - self.assertEqual(time_partitioning, other) - - def test___ne___wrong_type(self): - time_partitioning = self._make_one() - other = object() - self.assertNotEqual(time_partitioning, other) - self.assertEqual(time_partitioning, mock.ANY) - - def test___ne___same_value(self): - time_partitioning1 = self._make_one() - time_partitioning2 = self._make_one() - # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = time_partitioning1 != time_partitioning2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - time_partitioning1 = self._make_one() - time_partitioning2 = self._make_one(type_="HOUR") - self.assertNotEqual(time_partitioning1, time_partitioning2) - - def test___hash__set_equality(self): - time_partitioning1 = self._make_one(field="foo") - time_partitioning2 = self._make_one(field="foo") - set_one = {time_partitioning1, time_partitioning2} - set_two = {time_partitioning1, time_partitioning2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - time_partitioning1 = self._make_one(field="foo") - time_partitioning2 = self._make_one(field="bar") - set_one = {time_partitioning1} - set_two = {time_partitioning2} - self.assertNotEqual(set_one, set_two) - - def test___repr___minimal(self): - time_partitioning = self._make_one() - expected = "TimePartitioning(type=DAY)" - self.assertEqual(repr(time_partitioning), expected) - - def test___repr___explicit(self): - from google.cloud.bigquery.table import TimePartitioningType - - time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 - ) - expected = "TimePartitioning(" "expirationMs=10000," "field=name," "type=DAY)" - self.assertEqual(repr(time_partitioning), expected) - - def test_set_expiration_w_none(self): - time_partitioning = self._make_one() - time_partitioning.expiration_ms = None - assert time_partitioning._properties["expirationMs"] is None - - -@pytest.mark.skipif( - bigquery_storage_v1beta1 is None, reason="Requires `google-cloud-bigquery-storage`" -) -def test_table_reference_to_bqstorage(): - from google.cloud.bigquery import table as mut - - # Can't use parametrized pytest because bigquery_storage_v1beta1 may not be - # available. 
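For reference, the ``TimePartitioning`` behavior the tests above verify (a ``DAY`` default, an integer ``expiration_ms`` in Python, a string ``expirationMs`` on the wire) can be summarized in a small sketch; the column name is illustrative and the API shape is taken from the expectations in these tests:

.. code-block:: python

    from google.cloud import bigquery

    tp = bigquery.TimePartitioning(
        type_=bigquery.TimePartitioningType.DAY,
        field="ts_col",  # partition on this TIMESTAMP/DATE column
        expiration_ms=10000,
    )
    # The REST representation carries expirationMs as a string.
    assert tp.to_api_repr() == {
        "type": "DAY",
        "field": "ts_col",
        "expirationMs": "10000",
    }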
- expected = bigquery_storage_v1beta1.types.TableReference( - project_id="my-project", dataset_id="my_dataset", table_id="my_table" - ) - cases = ( - "my-project.my_dataset.my_table", - "my-project.my_dataset.my_table$20181225", - "my-project.my_dataset.my_table@1234567890", - "my-project.my_dataset.my_table$20181225@1234567890", - ) - - classes = (mut.TableReference, mut.Table, mut.TableListItem) - - for case, cls in itertools.product(cases, classes): - got = cls.from_string(case).to_bqstorage() - assert got == expected - - -@unittest.skipIf( - bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" -) -def test_table_reference_to_bqstorage_raises_import_error(): - from google.cloud.bigquery import table as mut - - classes = (mut.TableReference, mut.Table, mut.TableListItem) - for cls in classes: - with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises( - ValueError - ) as exc_context: - cls.from_string("my-project.my_dataset.my_table").to_bqstorage() - assert mut._NO_BQSTORAGE_ERROR in str(exc_context.value) From 9faa50273211eb65944cd4cd2f46e0ac799abdfa Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 7 Feb 2020 09:23:35 +0000 Subject: [PATCH 2/3] chore: remove error_reporting from the monorepo --- .kokoro/continuous/error_reporting.cfg | 7 - .kokoro/docs/error_reporting.cfg | 7 - .kokoro/presubmit/error_reporting.cfg | 7 - .kokoro/release/error_reporting.cfg | 7 - README.rst | 2 +- error_reporting/.coveragerc | 19 - error_reporting/.flake8 | 14 - error_reporting/.repo-metadata.json | 13 - error_reporting/CHANGELOG.md | 163 -- error_reporting/LICENSE | 202 --- error_reporting/MANIFEST.in | 5 - error_reporting/README.rst | 91 - error_reporting/docs/README.rst | 1 - error_reporting/docs/_static/custom.css | 4 - error_reporting/docs/_templates/layout.html | 50 - error_reporting/docs/changelog.md | 1 - error_reporting/docs/client.rst | 6 - error_reporting/docs/conf.py | 356 ---- error_reporting/docs/gapic/v1beta1/api.rst | 6 - error_reporting/docs/gapic/v1beta1/types.rst | 5 - error_reporting/docs/index.rst | 32 - error_reporting/docs/usage.rst | 115 -- error_reporting/docs/util.rst | 6 - error_reporting/google/__init__.py | 22 - error_reporting/google/cloud/__init__.py | 22 - .../google/cloud/error_reporting/__init__.py | 26 - .../google/cloud/error_reporting/_gapic.py | 68 - .../google/cloud/error_reporting/_logging.py | 91 - .../google/cloud/error_reporting/client.py | 390 ----- .../google/cloud/error_reporting/util.py | 47 - .../cloud/errorreporting_v1beta1/__init__.py | 45 - .../errorreporting_v1beta1/gapic/__init__.py | 0 .../errorreporting_v1beta1/gapic/enums.py | 89 - .../gapic/error_group_service_client.py | 342 ---- .../error_group_service_client_config.py | 33 - .../gapic/error_stats_service_client.py | 559 ------ .../error_stats_service_client_config.py | 38 - .../gapic/report_errors_service_client.py | 285 ---- .../report_errors_service_client_config.py | 28 - .../gapic/transports/__init__.py | 0 .../error_group_service_grpc_transport.py | 143 -- .../error_stats_service_grpc_transport.py | 155 -- .../report_errors_service_grpc_transport.py | 129 -- .../errorreporting_v1beta1/proto/__init__.py | 0 .../errorreporting_v1beta1/proto/common.proto | 164 -- .../proto/common_pb2.py | 825 --------- .../proto/common_pb2_grpc.py | 2 - .../proto/error_group_service.proto | 65 - .../proto/error_group_service_pb2.py | 207 --- .../proto/error_group_service_pb2_grpc.py | 71 - .../proto/error_stats_service.proto | 345 ---- 
.../proto/error_stats_service_pb2.py | 1513 ----------------- .../proto/error_stats_service_pb2_grpc.py | 86 - .../proto/report_errors_service.proto | 85 - .../proto/report_errors_service_pb2.py | 330 ---- .../proto/report_errors_service_pb2_grpc.py | 58 - .../proto/synth.metadata | 3 - .../cloud/errorreporting_v1beta1/types.py | 57 - error_reporting/noxfile.py | 160 -- error_reporting/pylint.config.py | 25 - error_reporting/setup.cfg | 3 - error_reporting/setup.py | 86 - error_reporting/synth.metadata | 39 - error_reporting/synth.py | 69 - error_reporting/tests/__init__.py | 0 ...st_system_report_errors_service_v1beta1.py | 48 - error_reporting/tests/system/test_system.py | 124 -- error_reporting/tests/unit/__init__.py | 13 - ...test_error_group_service_client_v1beta1.py | 142 -- ...test_error_stats_service_client_v1beta1.py | 207 --- ...st_report_errors_service_client_v1beta1.py | 104 -- error_reporting/tests/unit/test__gapic.py | 82 - error_reporting/tests/unit/test__logging.py | 83 - error_reporting/tests/unit/test_client.py | 222 --- error_reporting/tests/unit/test_util.py | 46 - 75 files changed, 1 insertion(+), 8894 deletions(-) delete mode 100644 .kokoro/continuous/error_reporting.cfg delete mode 100644 .kokoro/docs/error_reporting.cfg delete mode 100644 .kokoro/presubmit/error_reporting.cfg delete mode 100644 .kokoro/release/error_reporting.cfg delete mode 100644 error_reporting/.coveragerc delete mode 100644 error_reporting/.flake8 delete mode 100644 error_reporting/.repo-metadata.json delete mode 100644 error_reporting/CHANGELOG.md delete mode 100644 error_reporting/LICENSE delete mode 100644 error_reporting/MANIFEST.in delete mode 100644 error_reporting/README.rst delete mode 120000 error_reporting/docs/README.rst delete mode 100644 error_reporting/docs/_static/custom.css delete mode 100644 error_reporting/docs/_templates/layout.html delete mode 120000 error_reporting/docs/changelog.md delete mode 100644 error_reporting/docs/client.rst delete mode 100644 error_reporting/docs/conf.py delete mode 100644 error_reporting/docs/gapic/v1beta1/api.rst delete mode 100644 error_reporting/docs/gapic/v1beta1/types.rst delete mode 100644 error_reporting/docs/index.rst delete mode 100644 error_reporting/docs/usage.rst delete mode 100644 error_reporting/docs/util.rst delete mode 100644 error_reporting/google/__init__.py delete mode 100644 error_reporting/google/cloud/__init__.py delete mode 100644 error_reporting/google/cloud/error_reporting/__init__.py delete mode 100644 error_reporting/google/cloud/error_reporting/_gapic.py delete mode 100644 error_reporting/google/cloud/error_reporting/_logging.py delete mode 100644 error_reporting/google/cloud/error_reporting/client.py delete mode 100644 error_reporting/google/cloud/error_reporting/util.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/__init__.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/__init__.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/enums.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client_config.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client_config.py delete mode 100644 
error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client_config.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/__init__.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_group_service_grpc_transport.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_stats_service_grpc_transport.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/report_errors_service_grpc_transport.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/__init__.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/common.proto delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2_grpc.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service.proto delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2_grpc.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service.proto delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2_grpc.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service.proto delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2_grpc.py delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/proto/synth.metadata delete mode 100644 error_reporting/google/cloud/errorreporting_v1beta1/types.py delete mode 100644 error_reporting/noxfile.py delete mode 100644 error_reporting/pylint.config.py delete mode 100644 error_reporting/setup.cfg delete mode 100644 error_reporting/setup.py delete mode 100644 error_reporting/synth.metadata delete mode 100644 error_reporting/synth.py delete mode 100644 error_reporting/tests/__init__.py delete mode 100644 error_reporting/tests/system/gapic/v1beta1/test_system_report_errors_service_v1beta1.py delete mode 100644 error_reporting/tests/system/test_system.py delete mode 100644 error_reporting/tests/unit/__init__.py delete mode 100644 error_reporting/tests/unit/gapic/v1beta1/test_error_group_service_client_v1beta1.py delete mode 100644 error_reporting/tests/unit/gapic/v1beta1/test_error_stats_service_client_v1beta1.py delete mode 100644 error_reporting/tests/unit/gapic/v1beta1/test_report_errors_service_client_v1beta1.py delete mode 100644 error_reporting/tests/unit/test__gapic.py delete mode 100644 error_reporting/tests/unit/test__logging.py delete mode 100644 error_reporting/tests/unit/test_client.py delete mode 100644 error_reporting/tests/unit/test_util.py diff --git a/.kokoro/continuous/error_reporting.cfg b/.kokoro/continuous/error_reporting.cfg deleted file mode 100644 index 99a86c522b9a..000000000000 --- a/.kokoro/continuous/error_reporting.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "error_reporting" -} diff --git a/.kokoro/docs/error_reporting.cfg b/.kokoro/docs/error_reporting.cfg deleted file mode 100644 index 99a86c522b9a..000000000000 --- a/.kokoro/docs/error_reporting.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "error_reporting" -} diff --git a/.kokoro/presubmit/error_reporting.cfg b/.kokoro/presubmit/error_reporting.cfg deleted file mode 100644 index 99a86c522b9a..000000000000 --- a/.kokoro/presubmit/error_reporting.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "error_reporting" -} diff --git a/.kokoro/release/error_reporting.cfg b/.kokoro/release/error_reporting.cfg deleted file mode 100644 index 99a86c522b9a..000000000000 --- a/.kokoro/release/error_reporting.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "error_reporting" -} diff --git a/README.rst b/README.rst index 0c25bb43786b..8c767a12493c 100644 --- a/README.rst +++ b/README.rst @@ -264,7 +264,7 @@ The following client libraries have **alpha** support: .. _Grafeas Documentation: https://googleapis.dev/python/grafeas/latest .. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ -.. _Error Reporting README: https://github.com/googleapis/google-cloud-python/tree/master/error_reporting +.. _Error Reporting README: https://github.com/googleapis/python-error-reporting#python-client-for-stackdriver-error-reporting .. _Error Reporting Documentation: https://googleapis.dev/python/clouderrorreporting/latest .. _Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ diff --git a/error_reporting/.coveragerc b/error_reporting/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/error_reporting/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/error_reporting/.flake8 b/error_reporting/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/error_reporting/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. 
- __pycache__, - .git, - *.pyc, - conf.py diff --git a/error_reporting/.repo-metadata.json b/error_reporting/.repo-metadata.json deleted file mode 100644 index 291301df028c..000000000000 --- a/error_reporting/.repo-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "clouderrorreporting", - "name_pretty": "Stackdriver Error Reporting", - "product_documentation": "https://cloud.google.com/error-reporting", - "client_documentation": "https://googleapis.dev/python/clouderrorreporting/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559780", - "release_level": "alpha", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-error-reporting", - "api_id": "clouderrorreporting.googleapis.com", - "requires_billing": false -} \ No newline at end of file diff --git a/error_reporting/CHANGELOG.md b/error_reporting/CHANGELOG.md deleted file mode 100644 index aa3924d1f596..000000000000 --- a/error_reporting/CHANGELOG.md +++ /dev/null @@ -1,163 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/google-cloud-error-reporting/#history - -## 0.33.0 - -10-22-2019 12:10 PDT - -### New Features -- Add `client_options` to constructor ([#9152](https://github.com/googleapis/google-cloud-python/pull/9152)) - -### Dependencies -- Pin `google-cloud-logging >= 1.14.0, < 2.0.0dev`. ([#9476](https://github.com/googleapis/google-cloud-python/pull/9476)) - -### Documentation -- Remove references to the old authentication credentials. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) -- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) -- Remove CI for `gh-pages`, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) - -### Internal / Testing Changes -- Harden `test_report_exception` systest by increasing `max_tries`. ([#9396](https://github.com/googleapis/google-cloud-python/pull/9396)) - -## 0.32.1 - -08-23-2019 10:12 PDT - -### Implementation Changes -- Remove send/recv msg size limit (via synth). ([#8954](https://github.com/googleapis/google-cloud-python/pull/8954)) - -### Documentation -- Fix documentation links for iam and error-reporting. ([#9073](https://github.com/googleapis/google-cloud-python/pull/9073)) -- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) -- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) - -## 0.32.0 - -07-24-2019 16:17 PDT - - -### Implementation Changes -- Allow kwargs to be passed to create_channel (via synth). ([#8389](https://github.com/googleapis/google-cloud-python/pull/8389)) -- Fix typo in non-gRPC import. ([#8028](https://github.com/googleapis/google-cloud-python/pull/8028)) - -### New Features -- Add 'client_options' support, update list method docstrings (via synth). ([#8508](https://github.com/googleapis/google-cloud-python/pull/8508)) - -### Documentation -- Fix docs navigation issues. ([#8723](https://github.com/googleapis/google-cloud-python/pull/8723)) -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) -- Add compatibility check badges to READMEs. 
([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) -- Fix mistake in documentation ([#8271](https://github.com/googleapis/google-cloud-python/pull/8271)) - -### Internal / Testing Changes -- Pin black version (via synth). ([#8582](https://github.com/googleapis/google-cloud-python/pull/8582)) -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) -- Declare encoding as utf-8 in pb2 files (via synth). ([#8351](https://github.com/googleapis/google-cloud-python/pull/8351)) -- Add disclaimer to auto-generated template files (via synth). ([#8313](https://github.com/googleapis/google-cloud-python/pull/8313)) -- Suppress checking 'cov-fail-under' in nox default session (via synth). ([#8240](https://github.com/googleapis/google-cloud-python/pull/8240)) -- Blacken noxfile.py, setup.py (via synth). ([#8122](https://github.com/googleapis/google-cloud-python/pull/8122)) -- Add empty lines (via synth). ([#8057](https://github.com/googleapis/google-cloud-python/pull/8057)) - -## 0.31.0 - -05-17-2019 08:23 PDT - -### Implementation Changes -- Add routing header to method metadata (via synth). ([#7594](https://github.com/googleapis/google-cloud-python/pull/7594)) -- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) - -### New Features -- Add `client_info` support to Client. ([#7903](https://github.com/googleapis/google-cloud-python/pull/7903)) - -### Dependencies -- Pin `google-cloud-logging >= 1.11.0`. ([#8015](https://github.com/googleapis/google-cloud-python/pull/8015)) - -### Documentation -- Update client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) - -### Internal / Testing Changes -- Add nox session `docs` (via synth). ([#7770](https://github.com/googleapis/google-cloud-python/pull/7770)) -- Fix docstring replace in synth ([#7458](https://github.com/googleapis/google-cloud-python/pull/7458)) -- Copy lintified proto files (via synth). ([#7447](https://github.com/googleapis/google-cloud-python/pull/7447)) -- Add clarifying comment to blacken nox target (via synth). ([#7391](https://github.com/googleapis/google-cloud-python/pull/7391)) -- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) -- Update copyright headers. ([#7144](https://github.com/googleapis/google-cloud-python/pull/7144)) -- Protoc-generated serialization update. ([#7082](https://github.com/googleapis/google-cloud-python/pull/7082)) -- Pick up stub docstring fix in GAPIC generator. ([#6970](https://github.com/googleapis/google-cloud-python/pull/6970)) -- Fix formatting ([#7002](https://github.com/googleapis/google-cloud-python/pull/7002)) - -## 0.30.1 - -12-17-2018 18:17 PST - - -### Implementation Changes -- Pick up fixes to GAPIC generator. ([#6522](https://github.com/googleapis/google-cloud-python/pull/6522)) -- Fix `client_info` bug, update docstrings via synth. ([#6442](https://github.com/googleapis/google-cloud-python/pull/6442)) - -### Documentation -- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) -- Fix [#6321](https://github.com/googleapis/google-cloud-python/pull/6321) Update README service links in quickstart guides. ([#6322](https://github.com/googleapis/google-cloud-python/pull/6322)) -- Prep docs for repo split. 
([#6155](https://github.com/googleapis/google-cloud-python/pull/6155)) -- Replace links to `/stable/` with `/latest/`. ([#5901](https://github.com/googleapis/google-cloud-python/pull/5901)) - -### Internal / Testing Changes -- Update noxfile. -- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) -- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) -- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) -- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) -- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) -- Add synth metadata. ([#6566](https://github.com/googleapis/google-cloud-python/pull/6566)) -- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) -- Add 'synth.py'. ([#6082](https://github.com/googleapis/google-cloud-python/pull/6082)) -- Use Nox inplace installs ([#5865](https://github.com/googleapis/google-cloud-python/pull/5865)) - -## 0.30.0 - -### Implementation Changes -- Make dependency on logging less restrictive in error_reporting (#5345) - -### Internal / Testing Changes -- Modify system tests to use prerelease versions of grpcio (#5304) -- Add Test runs for Python 3.7 and remove 3.4 (#5295) -- Fix bad trove classifier - -## 0.29.1 - -### Dependencies - -- Update dependency range for api-core to include v1.0.0 releases (#4944) -- Fix missing extra in api-core dependency (#4764) - -### Testing and internal changes - -- Install local dependencies when running lint (#4936) -- Re-enable lint for tests, remove usage of pylint (#4921) -- Normalize all setup.py files (#4909) - -## 0.29.0 - -### Breaking changes - -- The underlying autogenerated client library was re-generated to pick up new - features and resolve bugs, this may change the exceptions raised from various - methods. (#4695) - -## 0.28.0 - -### Documentation - -- Added link to "Python Development Environment Setup Guide" in - project README (#4187, h/t to @michaelawyu) - -### Dependencies - -- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency - on `google-api-core` (#4221, #4280) -- Upgrading to `google-cloud-logging >= 1.4.0` (#4296) - -PyPI: https://pypi.org/project/google-cloud-error-reporting/0.28.0/ diff --git a/error_reporting/LICENSE b/error_reporting/LICENSE deleted file mode 100644 index d64569567334..000000000000 --- a/error_reporting/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/error_reporting/MANIFEST.in b/error_reporting/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/error_reporting/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/error_reporting/README.rst b/error_reporting/README.rst deleted file mode 100644 index bce80fd49e14..000000000000 --- a/error_reporting/README.rst +++ /dev/null @@ -1,91 +0,0 @@ -Python Client for Stackdriver Error Reporting -============================================= - -|pypi| |versions| - -The Stackdriver `Error Reporting`_ API counts, analyzes and aggregates the -crashes in your running cloud services. A centralized error management -interface displays the results with sorting and filtering capabilities. A -dedicated view shows the error details: time chart, occurrences, affected user -count, first and last seen dates and a cleaned exception stack trace. Opt-in -to receive email and mobile alerts on new errors. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Error Reporting: https://cloud.google.com/error-reporting/ -.. _Client Library Documentation: https://googleapis.dev/python/clouderrorreporting/latest -.. _Product Documentation: https://cloud.google.com/error-reporting/reference/ -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-error-reporting.svg - :target: https://pypi.org/project/google-cloud-error-reporting/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-error-reporting.svg - :target: https://pypi.org/project/google-cloud-error-reporting/ - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Google Cloud Error Reporting API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Google Cloud Error Reporting API.: https://cloud.google.com/error-reporting -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 - -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install google-cloud-error-reporting - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install google-cloud-error-reporting - - - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for the Stackdriver Error Reporting - API to see other available methods on the client. 
-- Read the `Product documentation`_ to learn - more about the product and see How-to Guides. diff --git a/error_reporting/docs/README.rst b/error_reporting/docs/README.rst deleted file mode 120000 index 38473f99d57f..000000000000 --- a/error_reporting/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../../error_reporting/README.rst \ No newline at end of file diff --git a/error_reporting/docs/_static/custom.css b/error_reporting/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/error_reporting/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/error_reporting/docs/_templates/layout.html b/error_reporting/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/error_reporting/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@
-
-{% extends "!layout.html" %}
-{%- block content %}
-{%- if theme_fixed_sidebar|lower == 'true' %}
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-          {%- block relbar_top %}
-            {%- if theme_show_relbar_top|tobool %}
-              <div class="related top">
-                &nbsp;
-                {{- rellink_markup () }}
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-          <div class="body" role="main">
-            <div id="python2-eol" class="admonition deprecated">
-              On January 1, 2020 this library will no longer support Python 2 on the latest released version.
-              Previously released library versions will continue to be available. For more information please
-              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-            </div>
-            {% block body %} {% endblock %}
-          </div>
-
-          {%- block relbar_bottom %}
-            {%- if theme_show_relbar_bottom|tobool %}
-              <div class="related bottom">
-                &nbsp;
-                {{- rellink_markup () }}
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/error_reporting/docs/changelog.md b/error_reporting/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/error_reporting/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/error_reporting/docs/client.rst b/error_reporting/docs/client.rst deleted file mode 100644 index 93fcff3bf071..000000000000 --- a/error_reporting/docs/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Error Reporting Client -======================= - -.. automodule:: google.cloud.error_reporting.client - :members: - :show-inheritance: diff --git a/error_reporting/docs/conf.py b/error_reporting/docs/conf.py deleted file mode 100644 index 4a2abf0f5bc1..000000000000 --- a/error_reporting/docs/conf.py +++ /dev/null @@ -1,356 +0,0 @@ -# -*- coding: utf-8 -*- -# -# google-cloud-error-reporting documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-error-reporting" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. 
-# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-error-reporting-doc" - -# -- Options for warnings ------------------------------------------------------ - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-error-reporting.tex", - u"google-cloud-error-reporting Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. 
-# latex_domain_indices = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-error-reporting", - u"google-cloud-error-reporting Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-error-reporting", - u"google-cloud-error-reporting Documentation", - author, - "google-cloud-error-reporting", - "GAPIC library for the {metadata.shortName} v1beta1 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/error_reporting/docs/gapic/v1beta1/api.rst b/error_reporting/docs/gapic/v1beta1/api.rst deleted file mode 100644 index 37ead7d3d055..000000000000 --- a/error_reporting/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Stackdriver Error Reporting API -========================================== - -.. automodule:: google.cloud.errorreporting_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/error_reporting/docs/gapic/v1beta1/types.rst b/error_reporting/docs/gapic/v1beta1/types.rst deleted file mode 100644 index 989f48a7d15e..000000000000 --- a/error_reporting/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Stackdriver Error Reporting API Client -================================================ - -.. automodule:: google.cloud.errorreporting_v1beta1.types - :members: \ No newline at end of file diff --git a/error_reporting/docs/index.rst b/error_reporting/docs/index.rst deleted file mode 100644 index 3fd35c32a475..000000000000 --- a/error_reporting/docs/index.rst +++ /dev/null @@ -1,32 +0,0 @@ -.. 
include:: README.rst - -Usage Documentation ------------------- - -.. toctree:: - :maxdepth: 2 - - usage - - -API Reference ------------- -.. toctree:: - :maxdepth: 2 - - - client - util - gapic/v1beta1/api - gapic/v1beta1/types - - -Changelog --------- - -For a list of all ``google-cloud-error-reporting`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog diff --git a/error_reporting/docs/usage.rst b/error_reporting/docs/usage.rst deleted file mode 100644 index 4e971d368f52..000000000000 --- a/error_reporting/docs/usage.rst +++ /dev/null @@ -1,115 +0,0 @@ -Using Stackdriver Error Reporting -================================= - -After configuring your environment, create a -:class:`Client <google.cloud.error_reporting.client.Client>` - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client() - -or pass in ``credentials`` and ``project`` explicitly - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client(project='my-project', credentials=creds) - -Error Reporting associates errors with a service, which is an identifier for -an executable, App Engine service, or job. The default service is "python", -but a different service can be specified for the client at construction time. You can -also optionally specify a version for that service, which defaults to -"default." - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client( - project='my-project', service="login_service", version="0.1.0") - - -Reporting an exception ----------------------- - -Report a stacktrace to Stackdriver Error Reporting after an exception: - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client() - try: - raise NameError - except Exception: - client.report_exception() - - -By default, the client will report the error using the service specified in -the client's constructor, or the default service of "python". - -The user and HTTP context can also be included in the exception. The HTTP -context can be constructed using -:class:`google.cloud.error_reporting.HTTPContext`. This will be used by -Stackdriver Error Reporting to help group exceptions. - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client() - user = 'example@gmail.com' - http_context = error_reporting.HTTPContext( - method='GET', url='/', user_agent='test agent', - referrer='example.com', response_status_code=500, - remote_ip='1.2.3.4') - try: - raise NameError - except Exception: - client.report_exception(http_context=http_context, user=user) - -An automatic helper to build the HTTP Context from a Flask (Werkzeug) request -object is provided. - -.. code-block:: python - - from google.cloud.error_reporting import build_flask_context - - @app.errorhandler(HTTPException) - def handle_error(exc): - client.report_exception( - http_context=build_flask_context(request)) - # rest of error response code here - - -Reporting an error without an exception ----------------------------------------- - -Errors can also be reported to Stackdriver Error Reporting outside the context -of an exception. The library will include the file path, function name, and -line number of the location where the error was reported. - -.. code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client() - client.report("Found an error!") - -As with reporting an exception, the user and HTTP context can be provided: - -..
code-block:: python - - from google.cloud import error_reporting - - client = error_reporting.Client() - user = 'example@gmail.com' - http_context = error_reporting.HTTPContext( - method='GET', url='/', user_agent='test agent', - referrer='example.com', response_status_code=500, - remote_ip='1.2.3.4') - client.report( - "Found an error!", http_context=http_context, user=user) diff --git a/error_reporting/docs/util.rst b/error_reporting/docs/util.rst deleted file mode 100644 index 9bc8aad51367..000000000000 --- a/error_reporting/docs/util.rst +++ /dev/null @@ -1,6 +0,0 @@ -Error Reporting Utilities -========================= - -.. automodule:: google.cloud.error_reporting.util - :members: - :show-inheritance: diff --git a/error_reporting/google/__init__.py b/error_reporting/google/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/error_reporting/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/error_reporting/google/cloud/__init__.py b/error_reporting/google/cloud/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/error_reporting/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
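The snippets in the removed usage guide above compose into a single runnable report. A minimal consolidated sketch, where the project ID, service name, version, and user are placeholder values:

.. code-block:: python

    from google.cloud import error_reporting

    # All identifiers here are placeholders; substitute real values.
    client = error_reporting.Client(
        project='my-project', service='login_service', version='0.1.0')

    http_context = error_reporting.HTTPContext(
        method='GET', url='/', user_agent='test agent',
        referrer='example.com', response_status_code=500,
        remote_ip='1.2.3.4')

    try:
        raise NameError
    except Exception:
        # Both keyword arguments are optional.
        client.report_exception(
            http_context=http_context, user='example@gmail.com')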
- -"""Client library for Stackdriver Error Reporting""" - - -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-error-reporting").version - -from google.cloud.error_reporting.client import Client -from google.cloud.error_reporting.client import HTTPContext -from google.cloud.error_reporting.util import build_flask_context - -__all__ = ["__version__", "Client", "HTTPContext", "build_flask_context"] diff --git a/error_reporting/google/cloud/error_reporting/_gapic.py b/error_reporting/google/cloud/error_reporting/_gapic.py deleted file mode 100644 index 0c6ec9e60a1a..000000000000 --- a/error_reporting/google/cloud/error_reporting/_gapic.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""GAX wrapper for Error Reporting API requests.""" - -from google.cloud.errorreporting_v1beta1.gapic import report_errors_service_client -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 -from google.protobuf.json_format import ParseDict - - -def make_report_error_api(client): - """Create an instance of the gapic Error Reporting API. - - :type client: :class:`google.cloud.error_reporting.Client` - :param client: Error Reporting client. - - :rtype: :class:`_ErrorReportingGapicApi` - :returns: An Error Reporting API instance. - """ - gapic_api = report_errors_service_client.ReportErrorsServiceClient( - credentials=client._credentials, - client_info=client._client_info, - client_options=client._client_options, - ) - return _ErrorReportingGapicApi(gapic_api, client.project) - - -class _ErrorReportingGapicApi(object): - """Helper mapping Error Reporting-related APIs - - :type gapic_api: - :class:`report_errors_service_client.ReportErrorsServiceClient` - :param gapic_api: API object used to make RPCs. - - :type project: str - :param project: Google Cloud Project ID - """ - - def __init__(self, gapic_api, project): - self._gapic_api = gapic_api - self._project = project - - def report_error_event(self, error_report): - """Uses the gapic client to report the error. - - :type error_report: dict - :param error_report: - payload of the error report formatted according to - https://cloud.google.com/error-reporting/docs/formatting-error-messages - This object should be built using - :meth:`~google.cloud.error_reporting.client._build_error_report` - """ - project_name = self._gapic_api.project_path(self._project) - error_report_payload = report_errors_service_pb2.ReportedErrorEvent() - ParseDict(error_report, error_report_payload) - self._gapic_api.report_error_event(project_name, error_report_payload) diff --git a/error_reporting/google/cloud/error_reporting/_logging.py b/error_reporting/google/cloud/error_reporting/_logging.py deleted file mode 100644 index 5832cc7e2d24..000000000000 --- a/error_reporting/google/cloud/error_reporting/_logging.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved.
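The ``report_error_event`` helper in ``_gapic.py`` above leans on ``ParseDict`` to turn the documented dict format into the wire message. A minimal sketch of that conversion, with a hypothetical payload:

.. code-block:: python

    from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2
    from google.protobuf.json_format import ParseDict

    # Hypothetical payload in the documented error-report format.
    error_report = {
        'serviceContext': {'service': 'python'},
        'message': 'Traceback (most recent call last): ...',
    }

    # The dict is mapped field-for-field onto the protobuf message that
    # report_error_event() then sends over gRPC.
    event = report_errors_service_pb2.ReportedErrorEvent()
    ParseDict(error_report, event)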
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Interact with Stackdriver Error Reporting via Logging API. - -It's possible to report Stackdriver Error Reporting errors by formatting -structured log messages in Stackdriver Logging in a given format. This -client provides a mechanism to report errors using that technique. -""" - -import google.cloud.logging.client - - -class _ErrorReportingLoggingAPI(object): - """Report to Stackdriver Error Reporting via Logging API - - :type project: str - :param project: the project which the client acts on behalf of. If not - passed falls back to the default inferred from the - environment. - - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The authorization credentials to attach to requests. - These credentials identify this application to the service. - If none are specified, the client will attempt to ascertain - the credentials from the environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type client_info: - :class:`google.api_core.client_info.ClientInfo` or - :class:`google.api_core.gapic_v1.client_info.ClientInfo` - :param client_info: - The client info used to send a user-agent string along with API - requests. If ``None``, then default info will be used. Generally, - you only need to set this if you're developing your own library - or partner tool. - - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - or :class:`dict` - :param client_options: (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. - """ - - def __init__( - self, - project, - credentials=None, - _http=None, - client_info=None, - client_options=None, - ): - self.logging_client = google.cloud.logging.client.Client( - project, - credentials, - _http=_http, - client_info=client_info, - client_options=client_options, - ) - - def report_error_event(self, error_report): - """Report error payload. 
- - :type error_report: dict - :param error_report: - dict payload of the error report formatted according to - https://cloud.google.com/error-reporting/docs/formatting-error-messages - This object should be built using - :meth:`~google.cloud.error_reporting.client._build_error_report` - """ - logger = self.logging_client.logger("errors") - logger.log_struct(error_report) diff --git a/error_reporting/google/cloud/error_reporting/client.py b/error_reporting/google/cloud/error_reporting/client.py deleted file mode 100644 index c4cb816ead75..000000000000 --- a/error_reporting/google/cloud/error_reporting/client.py +++ /dev/null @@ -1,390 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Client for interacting with the Stackdriver Error Reporting API""" - -import os -import traceback - -import six - -try: - from google.cloud.error_reporting._gapic import make_report_error_api -except ImportError: # pragma: NO COVER - from google.api_core import client_info # noqa - - _HAVE_GRPC = False -else: - from google.api_core.gapic_v1 import client_info - - _HAVE_GRPC = True - -from google.cloud.client import ClientWithProject -from google.cloud.error_reporting import __version__ -from google.cloud.error_reporting._logging import _ErrorReportingLoggingAPI -from google.cloud.environment_vars import DISABLE_GRPC - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC -_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) - - -class HTTPContext(object): - """HTTPContext defines an object that captures the parameters for the - httpRequest part of the Error Reporting API - - :type method: str - :param method: The type of HTTP request, such as GET, POST, etc. - - :type url: str - :param url: The URL of the request - - :type user_agent: str - :param user_agent: The user agent information that is provided with the - request. - - :type referrer: str - :param referrer: The referrer information that is provided with the - request. - - :type response_status_code: int - :param response_status_code: The HTTP response status code for the request. - - :type remote_ip: str - :param remote_ip: The IP address from which the request originated. This - can be IPv4, IPv6, or a token which is derived from - the IP address, depending on the data that has been - provided in the error report. - """ - - def __init__( - self, - method=None, - url=None, - user_agent=None, - referrer=None, - response_status_code=None, - remote_ip=None, - ): - self.method = method - self.url = url - # intentionally camelCase, to match what the JSON API expects - # pylint: disable=invalid-name - self.userAgent = user_agent - self.referrer = referrer - self.responseStatusCode = response_status_code - self.remoteIp = remote_ip - - -class Client(ClientWithProject): - """Error Reporting client. Currently Error Reporting is done by creating - a Logging client. - - :type project: str - :param project: the project which the client acts on behalf of.
If not - passed falls back to the default inferred from the - environment. - - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The authorization credentials to attach to requests. - These credentials identify this application to the service. - If none are specified, the client will attempt to ascertain - the credentials from the environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type service: str - :param service: An identifier of the service, such as the name of the - executable, job, or Google App Engine service name. This - field is expected to have a low number of values that are - relatively stable over time, as opposed to version, - which can be changed whenever new code is deployed. - - - :type version: str - :param version: Represents the source code version that the developer - provided, which could represent a version label or a Git - SHA-1 hash, for example. If the developer did not provide - a version, the value is set to default. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable. - This parameter should be considered private, and could - change in the future. - - :type client_info: - :class:`google.api_core.client_info.ClientInfo` or - :class:`google.api_core.gapic_v1.client_info.ClientInfo` - :param client_info: - The client info used to send a user-agent string along with API - requests. If ``None``, then default info will be used. Generally, - you only need to set this if you're developing your own library - or partner tool. - - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - or :class:`dict` - :param client_options: (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. - - :raises: :class:`ValueError` if the project is neither passed in nor - set in the environment. - """ - - SCOPE = ("https://www.googleapis.com/auth/cloud-platform",) - """The scopes required for authenticating as an API consumer.""" - - def __init__( - self, - project=None, - credentials=None, - _http=None, - service=None, - version=None, - client_info=_CLIENT_INFO, - client_options=None, - _use_grpc=None, - ): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http - ) - self._report_errors_api = None - - self.service = service if service else self.DEFAULT_SERVICE - self.version = version - self._client_info = client_info - self._client_options = client_options - - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - DEFAULT_SERVICE = "python" - - @property - def report_errors_api(self): - """Helper for logging-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - - :rtype: - :class:`_gapic._ErrorReportingGapicApi` - or - :class:`._logging._ErrorReportingLoggingAPI` - :returns: A class that implements the report errors API. 
- """ - if self._report_errors_api is None: - if self._use_grpc: - self._report_errors_api = make_report_error_api(self) - else: - self._report_errors_api = _ErrorReportingLoggingAPI( - self.project, - self._credentials, - self._http, - self._client_info, - self._client_options, - ) - return self._report_errors_api - - def _build_error_report( - self, message, report_location=None, http_context=None, user=None - ): - """Builds the Error Reporting object to report. - - This builds the object according to - - https://cloud.google.com/error-reporting/docs/formatting-error-messages - - :type message: str - :param message: The stack trace that was reported or logged by the - service. - - :type report_location: dict - :param report_location: The location in the source code where the - decision was made to report the error, usually the place - where it was logged. For a logged exception this would be the - source line where the exception is logged, usually close to - the place where it was caught. - - This should be a Python dict that contains the keys 'filePath', - 'lineNumber', and 'functionName' - - :type http_context: :class:`google.cloud.error_reporting.HTTPContext` - :param http_context: The HTTP request which was processed when the - error was triggered. - - :type user: str - :param user: The user who caused or was affected by the crash. This can - be a user ID, an email address, or an arbitrary token that - uniquely identifies the user. When sending an error - report, leave this field empty if the user was not - logged in. In this case the Error Reporting system will - use other data, such as remote IP address, - to distinguish affected users. - :rtype: dict - :returns: A dict payload ready to be serialized to JSON and sent to - the API. - """ - payload = { - "serviceContext": {"service": self.service}, - "message": "{0}".format(message), - } - - if self.version: - payload["serviceContext"]["version"] = self.version - - if report_location or http_context or user: - payload["context"] = {} - - if report_location: - payload["context"]["reportLocation"] = report_location - - if http_context: - http_context_dict = http_context.__dict__ - # strip out None values - payload["context"]["httpRequest"] = { - key: value - for key, value in six.iteritems(http_context_dict) - if value is not None - } - if user: - payload["context"]["user"] = user - return payload - - def _send_error_report( - self, message, report_location=None, http_context=None, user=None - ): - """Makes the call to the Error Reporting API. - - This is the lower-level interface to build and send the payload; - generally users will use either report() or report_exception() to - automatically gather the parameters for this method. - - :type message: str - :param message: The stack trace that was reported or logged by the - service. - - :type report_location: dict - :param report_location: The location in the source code where the - decision was made to report the error, usually the place - where it was logged. For a logged exception this would be the - source line where the exception is logged, usually close to - the place where it was caught. - - This should be a Python dict that contains the keys 'filePath', - 'lineNumber', and 'functionName' - - :type http_context: :class:`google.cloud.error_reporting.HTTPContext` - :param http_context: The HTTP request which was processed when the - error was triggered. - - :type user: str - :param user: The user who caused or was affected by the crash.
This can - be a user ID, an email address, or an arbitrary token that - uniquely identifies the user. When sending an error - report, leave this field empty if the user was not - logged in. In this case the Error Reporting system will - use other data, such as remote IP address, - to distinguish affected users. - """ - error_report = self._build_error_report( - message, report_location, http_context, user - ) - self.report_errors_api.report_error_event(error_report) - - def report(self, message, http_context=None, user=None): - """ Reports a message to Stackdriver Error Reporting - - https://cloud.google.com/error-reporting/docs/formatting-error-messages - - :type message: str - :param message: A user-supplied message to report - - :type http_context: :class:`google.cloud.error_reporting.HTTPContext` - :param http_context: The HTTP request which was processed when the - error was triggered. - - :type user: str - :param user: The user who caused or was affected by the crash. This - can be a user ID, an email address, or an arbitrary - token that uniquely identifies the user. When sending - an error report, leave this field empty if the user - was not logged in. In this case the Error Reporting - system will use other data, such as remote IP address, - to distinguish affected users. - - Example: - - .. code-block:: python - - >>> client.report("Something went wrong!") - """ - stack = traceback.extract_stack() - last_call = stack[-2] - file_path = last_call[0] - line_number = last_call[1] - function_name = last_call[2] - report_location = { - "filePath": file_path, - "lineNumber": line_number, - "functionName": function_name, - } - - self._send_error_report( - message, - http_context=http_context, - user=user, - report_location=report_location, - ) - - def report_exception(self, http_context=None, user=None): - """ Reports the details of the latest exception to Stackdriver Error - Reporting. - - :type http_context: :class:`google.cloud.error_reporting.HTTPContext` - :param http_context: The HTTP request which was processed when the - error was triggered. - - :type user: str - :param user: The user who caused or was affected by the crash. This - can be a user ID, an email address, or an arbitrary - token that uniquely identifies the user. When sending an - error report, leave this field empty if the user was - not logged in. In this case the Error Reporting system - will use other data, such as remote IP address, - to distinguish affected users. - - Example:: - - >>> try: - ...     raise NameError - ... except Exception: - ...     client.report_exception() - """ - self._send_error_report( - traceback.format_exc(), http_context=http_context, user=user - ) diff --git a/error_reporting/google/cloud/error_reporting/util.py b/error_reporting/google/cloud/error_reporting/util.py deleted file mode 100644 index 151540cdb855..000000000000 --- a/error_reporting/google/cloud/error_reporting/util.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
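For reference, ``_build_error_report`` above assembles a payload of roughly the following shape; every value in this sketch is illustrative:

.. code-block:: python

    # Illustrative output of _build_error_report(); all values are made up.
    payload = {
        'serviceContext': {'service': 'login_service', 'version': '0.1.0'},
        'message': 'Found an error!',
        'context': {
            'reportLocation': {
                'filePath': 'app.py',
                'lineNumber': 42,
                'functionName': 'handle_login',
            },
            # None-valued HTTPContext attributes are stripped before sending.
            'httpRequest': {'method': 'GET', 'url': '/'},
            'user': 'example@gmail.com',
        },
    }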
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Utility functions for Stackdriver Error Reporting.""" - -from google.cloud.error_reporting.client import HTTPContext - - -def build_flask_context(request): - """Builds an HTTP context object from a Flask (Werkzeug) request object. - - This helper method extracts the relevant HTTP context from a Flask request - object into an object ready to be sent to Error Reporting. - - .. code-block:: python - - >>> @app.errorhandler(HTTPException) - ... def handle_error(exc): - ... client.report_exception( - ... http_context=build_flask_context(request)) - ... # rest of error response code here - - :type request: :class:`werkzeug.wrappers.request` - :param request: The Flask request object to convert. - - :rtype: :class:`~google.cloud.error_reporting.client.HTTPContext` - :returns: An HTTPContext object ready to be sent to the Stackdriver Error - Reporting API. - """ - return HTTPContext( - url=request.url, - method=request.method, - user_agent=request.user_agent.string, - referrer=request.referrer, - remote_ip=request.remote_addr, - ) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/__init__.py b/error_reporting/google/cloud/errorreporting_v1beta1/__init__.py deleted file mode 100644 index 9553e4a4738c..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
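A short sketch of ``build_flask_context`` in use, assuming a Flask application and an ``error_reporting.Client`` as in the removed usage guide:

.. code-block:: python

    from flask import Flask, request
    from werkzeug.exceptions import HTTPException

    from google.cloud import error_reporting
    from google.cloud.error_reporting import build_flask_context

    app = Flask(__name__)
    client = error_reporting.Client()

    @app.errorhandler(HTTPException)
    def handle_error(exc):
        # url, method, user agent, referrer, and remote IP are copied
        # from the active Flask request into an HTTPContext.
        client.report_exception(http_context=build_flask_context(request))
        return 'An internal error occurred.', 500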
- -from __future__ import absolute_import - -from google.cloud.errorreporting_v1beta1 import types -from google.cloud.errorreporting_v1beta1.gapic import enums -from google.cloud.errorreporting_v1beta1.gapic import error_group_service_client -from google.cloud.errorreporting_v1beta1.gapic import error_stats_service_client -from google.cloud.errorreporting_v1beta1.gapic import report_errors_service_client - - -class ErrorGroupServiceClient(error_group_service_client.ErrorGroupServiceClient): - __doc__ = error_group_service_client.ErrorGroupServiceClient.__doc__ - enums = enums - - -class ErrorStatsServiceClient(error_stats_service_client.ErrorStatsServiceClient): - __doc__ = error_stats_service_client.ErrorStatsServiceClient.__doc__ - enums = enums - - -class ReportErrorsServiceClient(report_errors_service_client.ReportErrorsServiceClient): - __doc__ = report_errors_service_client.ReportErrorsServiceClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "ErrorGroupServiceClient", - "ErrorStatsServiceClient", - "ReportErrorsServiceClient", -) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/__init__.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/enums.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/enums.py deleted file mode 100644 index 890ea7e7e4c2..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/enums.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class ErrorGroupOrder(enum.IntEnum): - """ - A sorting order of error groups. - - Attributes: - GROUP_ORDER_UNSPECIFIED (int): No group order specified. - COUNT_DESC (int): Total count of errors in the given time window in descending order. - LAST_SEEN_DESC (int): Timestamp when the group was last seen in the given time window - in descending order. - CREATED_DESC (int): Timestamp when the group was created in descending order. - AFFECTED_USERS_DESC (int): Number of affected users in the given time window in descending order. - """ - - GROUP_ORDER_UNSPECIFIED = 0 - COUNT_DESC = 1 - LAST_SEEN_DESC = 2 - CREATED_DESC = 3 - AFFECTED_USERS_DESC = 4 - - -class TimedCountAlignment(enum.IntEnum): - """ - Specifies how the time periods of error group counts are aligned. - - Attributes: - ERROR_COUNT_ALIGNMENT_UNSPECIFIED (int): No alignment specified. - ALIGNMENT_EQUAL_ROUNDED (int): The time periods shall be consecutive, have width equal to the requested - duration, and be aligned at the ``alignment_time`` provided in the - request. The ``alignment_time`` does not have to be inside the query - period but even if it is outside, only time periods are returned which - overlap with the query period. 
A rounded alignment will typically result - in a different size of the first or the last time period. - ALIGNMENT_EQUAL_AT_END (int): The time periods shall be consecutive, have width equal to the - requested duration, and be aligned at the end of the requested time - period. This can result in a different size of the - first time period. - """ - - ERROR_COUNT_ALIGNMENT_UNSPECIFIED = 0 - ALIGNMENT_EQUAL_ROUNDED = 1 - ALIGNMENT_EQUAL_AT_END = 2 - - -class QueryTimeRange(object): - class Period(enum.IntEnum): - """ - The supported time ranges. - - Attributes: - PERIOD_UNSPECIFIED (int): Do not use. - PERIOD_1_HOUR (int): Retrieve data for the last hour. - Recommended minimum timed count duration: 1 min. - PERIOD_6_HOURS (int): Retrieve data for the last 6 hours. - Recommended minimum timed count duration: 10 min. - PERIOD_1_DAY (int): Retrieve data for the last day. - Recommended minimum timed count duration: 1 hour. - PERIOD_1_WEEK (int): Retrieve data for the last week. - Recommended minimum timed count duration: 6 hours. - PERIOD_30_DAYS (int): Retrieve data for the last 30 days. - Recommended minimum timed count duration: 1 day. - """ - - PERIOD_UNSPECIFIED = 0 - PERIOD_1_HOUR = 1 - PERIOD_6_HOURS = 2 - PERIOD_1_DAY = 3 - PERIOD_1_WEEK = 4 - PERIOD_30_DAYS = 5 diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client.py deleted file mode 100644 index af60931f74e9..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client.py +++ /dev/null @@ -1,342 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
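The enums above are plain ``IntEnum`` values and can be handed straight to the stats RPCs that appear later in this patch. A brief sketch, where ``'[PROJECT]'`` is a placeholder and the dict ``time_range`` relies on the documented dict-for-message convention:

.. code-block:: python

    from google.cloud import errorreporting_v1beta1
    from google.cloud.errorreporting_v1beta1 import enums

    client = errorreporting_v1beta1.ErrorStatsServiceClient()
    project_name = client.project_path('[PROJECT]')

    # IntEnum values are accepted directly by the RPC wrappers.
    for group_stats in client.list_group_stats(
            project_name,
            {'period': enums.QueryTimeRange.Period.PERIOD_1_DAY},
            alignment=enums.TimedCountAlignment.ALIGNMENT_EQUAL_AT_END,
            order=enums.ErrorGroupOrder.COUNT_DESC):
        print(group_stats)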
- -"""Accesses the google.devtools.clouderrorreporting.v1beta1 ErrorGroupService API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.path_template -import grpc - -from google.cloud.errorreporting_v1beta1.gapic import enums -from google.cloud.errorreporting_v1beta1.gapic import error_group_service_client_config -from google.cloud.errorreporting_v1beta1.gapic.transports import ( - error_group_service_grpc_transport, -) -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2_grpc - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-error-reporting" -).version - - -class ErrorGroupServiceClient(object): - """Service for retrieving and updating individual error groups.""" - - SERVICE_ADDRESS = "clouderrorreporting.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ErrorGroupServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def group_path(cls, project, group): - """Return a fully-qualified group string.""" - return google.api_core.path_template.expand( - "projects/{project}/groups/{group}", project=project, group=group - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ErrorGroupServiceGrpcTransport, - Callable[[~.Credentials, type], ~.ErrorGroupServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = error_group_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=error_group_service_grpc_transport.ErrorGroupServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = error_group_service_grpc_transport.ErrorGroupServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def get_group( - self, - group_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Get the specified group. - - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ErrorGroupServiceClient() - >>> - >>> group_name = client.group_path('[PROJECT]', '[GROUP]') - >>> - >>> response = client.get_group(group_name) - - Args: - group_name (str): [Required] The group resource name. Written as - projects/projectID/groups/group\_name. 
Call groupStats.list to return a - list of groups belonging to this project. - - Example: projects/my-project-123/groups/my-group - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.errorreporting_v1beta1.types.ErrorGroup` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_group" not in self._inner_api_calls: - self._inner_api_calls[ - "get_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_group, - default_retry=self._method_configs["GetGroup"].retry, - default_timeout=self._method_configs["GetGroup"].timeout, - client_info=self._client_info, - ) - - request = error_group_service_pb2.GetGroupRequest(group_name=group_name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("group_name", group_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_group( - self, - group, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Replace the data for the specified group. - Fails if the group does not exist. - - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ErrorGroupServiceClient() - >>> - >>> # TODO: Initialize `group`: - >>> group = {} - >>> - >>> response = client.update_group(group) - - Args: - group (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ErrorGroup]): [Required] The group which replaces the resource on the server. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.ErrorGroup` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.errorreporting_v1beta1.types.ErrorGroup` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_group" not in self._inner_api_calls: - self._inner_api_calls[ - "update_group" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_group, - default_retry=self._method_configs["UpdateGroup"].retry, - default_timeout=self._method_configs["UpdateGroup"].timeout, - client_info=self._client_info, - ) - - request = error_group_service_pb2.UpdateGroupRequest(group=group) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("group.name", group.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_group"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client_config.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client_config.py deleted file mode 100644 index e70ec324be62..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_group_service_client_config.py +++ /dev/null @@ -1,33 +0,0 @@ -config = { - "interfaces": { - "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "GetGroup": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateGroup": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client.py deleted file mode 100644 index 692f0329e8de..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client.py +++ /dev/null @@ -1,559 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
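A sketch chaining the two ``ErrorGroupService`` calls above; the resource IDs are placeholders, and the ``tracking_issues`` mutation assumes the ``TrackingIssue`` message shape from the v1beta1 protos:

.. code-block:: python

    from google.cloud import errorreporting_v1beta1

    client = errorreporting_v1beta1.ErrorGroupServiceClient()

    # '[PROJECT]' and '[GROUP]' are placeholders, as in the docstrings above.
    group_name = client.group_path('[PROJECT]', '[GROUP]')
    group = client.get_group(group_name)

    # update_group() replaces the stored group and fails if it does not exist.
    # (Assumes ErrorGroup.tracking_issues holds TrackingIssue{url} messages.)
    group.tracking_issues.add(url='https://issuetracker.example.com/123')
    updated = client.update_group(group)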
- -"""Accesses the google.devtools.clouderrorreporting.v1beta1 ErrorStatsService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.errorreporting_v1beta1.gapic import enums -from google.cloud.errorreporting_v1beta1.gapic import error_stats_service_client_config -from google.cloud.errorreporting_v1beta1.gapic.transports import ( - error_stats_service_grpc_transport, -) -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2_grpc -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2_grpc -from google.protobuf import duration_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-error-reporting" -).version - - -class ErrorStatsServiceClient(object): - """ - An API for retrieving and managing error statistics as well as data for - individual events. - """ - - SERVICE_ADDRESS = "clouderrorreporting.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ErrorStatsServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ErrorStatsServiceGrpcTransport, - Callable[[~.Credentials, type], ~.ErrorStatsServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. 
- credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = error_stats_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=error_stats_service_grpc_transport.ErrorStatsServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = error_stats_service_grpc_transport.ErrorStatsServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. 
- self._inner_api_calls = {} - - # Service calls - def list_group_stats( - self, - project_name, - time_range, - group_id=None, - service_filter=None, - timed_count_duration=None, - alignment=None, - alignment_time=None, - order=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the specified groups. - - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ErrorStatsServiceClient() - >>> - >>> project_name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `time_range`: - >>> time_range = {} - >>> - >>> # Iterate over all results - >>> for element in client.list_group_stats(project_name, time_range): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_group_stats(project_name, time_range).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project_name (str): [Required] The resource name of the Google Cloud Platform project. - Written as projects/ plus the Google Cloud Platform project ID. - - Example: projects/my-project-123. - time_range (Union[dict, ~google.cloud.errorreporting_v1beta1.types.QueryTimeRange]): [Optional] List data for the given time range. If not set a default time - range is used. The field time\_range\_begin in the response will specify - the beginning of this time range. Only ErrorGroupStats with a non-zero - count in the given time range are returned, unless the request contains - an explicit group\_id list. If a group\_id list is given, also - ErrorGroupStats with zero occurrences are returned. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.QueryTimeRange` - group_id (list[str]): [Optional] List all ErrorGroupStats with these IDs. - service_filter (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter]): [Optional] List only ErrorGroupStats which belong to a service context - that matches the filter. Data for all service contexts is returned if - this field is not specified. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter` - timed_count_duration (Union[dict, ~google.cloud.errorreporting_v1beta1.types.Duration]): [Optional] The preferred duration for a single returned ``TimedCount``. - If not set, no timed counts are returned. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.Duration` - alignment (~google.cloud.errorreporting_v1beta1.types.TimedCountAlignment): [Optional] The alignment of the timed counts to be returned. Default is - ``ALIGNMENT_EQUAL_AT_END``. - alignment_time (Union[dict, ~google.cloud.errorreporting_v1beta1.types.Timestamp]): [Optional] Time where the timed counts shall be aligned if rounded - alignment is chosen. Default is 00:00 UTC. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.Timestamp` - order (~google.cloud.errorreporting_v1beta1.types.ErrorGroupOrder): [Optional] The sort order in which the results are returned. Default is - ``COUNT_DESC``. - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.errorreporting_v1beta1.types.ErrorGroupStats` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_group_stats" not in self._inner_api_calls: - self._inner_api_calls[ - "list_group_stats" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_group_stats, - default_retry=self._method_configs["ListGroupStats"].retry, - default_timeout=self._method_configs["ListGroupStats"].timeout, - client_info=self._client_info, - ) - - request = error_stats_service_pb2.ListGroupStatsRequest( - project_name=project_name, - time_range=time_range, - group_id=group_id, - service_filter=service_filter, - timed_count_duration=timed_count_duration, - alignment=alignment, - alignment_time=alignment_time, - order=order, - page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_name", project_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_group_stats"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="error_group_stats", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_events( - self, - project_name, - group_id, - service_filter=None, - time_range=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the specified events. - - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ErrorStatsServiceClient() - >>> - >>> project_name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `group_id`: - >>> group_id = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_events(project_name, group_id): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_events(project_name, group_id).pages: - ... for element in page: - ... # process element - ... 
pass - - Args: - project_name (str): [Required] The resource name of the Google Cloud Platform project. - Written as ``projects/`` plus the `Google Cloud Platform project - ID `__. Example: - ``projects/my-project-123``. - group_id (str): [Required] The group for which events shall be returned. - service_filter (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter]): [Optional] List only ErrorGroups which belong to a service context that - matches the filter. Data for all service contexts is returned if this - field is not specified. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter` - time_range (Union[dict, ~google.cloud.errorreporting_v1beta1.types.QueryTimeRange]): [Optional] List only data for the given time range. If not set a default - time range is used. The field time\_range\_begin in the response will - specify the beginning of this time range. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.QueryTimeRange` - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.errorreporting_v1beta1.types.ErrorEvent` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_events" not in self._inner_api_calls: - self._inner_api_calls[ - "list_events" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_events, - default_retry=self._method_configs["ListEvents"].retry, - default_timeout=self._method_configs["ListEvents"].timeout, - client_info=self._client_info, - ) - - request = error_stats_service_pb2.ListEventsRequest( - project_name=project_name, - group_id=group_id, - service_filter=service_filter, - time_range=time_range, - page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_name", project_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_events"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="error_events", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_events( - self, - project_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes all error events of a given project. - - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ErrorStatsServiceClient() - >>> - >>> project_name = client.project_path('[PROJECT]') - >>> - >>> response = client.delete_events(project_name) - - Args: - project_name (str): [Required] The resource name of the Google Cloud Platform project. - Written as ``projects/`` plus the `Google Cloud Platform project - ID `__. Example: - ``projects/my-project-123``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_events" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_events" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_events, - default_retry=self._method_configs["DeleteEvents"].retry, - default_timeout=self._method_configs["DeleteEvents"].timeout, - client_info=self._client_info, - ) - - request = error_stats_service_pb2.DeleteEventsRequest(project_name=project_name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_name", project_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["delete_events"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client_config.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client_config.py deleted file mode 100644 index fead94c5b733..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client_config.py +++ /dev/null @@ -1,38 +0,0 @@ -config = { - "interfaces": { - "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListGroupStats": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListEvents": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteEvents": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client.py deleted file mode 100644 index 7ad37cc1cd36..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client.py +++ /dev/null @@ -1,285 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.devtools.clouderrorreporting.v1beta1 ReportErrorsService API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.path_template -import grpc - -from google.cloud.errorreporting_v1beta1.gapic import enums -from google.cloud.errorreporting_v1beta1.gapic import ( - report_errors_service_client_config, -) -from google.cloud.errorreporting_v1beta1.gapic.transports import ( - report_errors_service_grpc_transport, -) -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2_grpc -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2_grpc -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2_grpc -from google.protobuf import duration_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-error-reporting" -).version - - -class ReportErrorsServiceClient(object): - """An API for reporting error events.""" - - SERVICE_ADDRESS = "clouderrorreporting.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ReportErrorsServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ReportErrorsServiceGrpcTransport, - Callable[[~.Credentials, type], ~.ReportErrorsServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. 
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = report_errors_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=report_errors_service_grpc_transport.ReportErrorsServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = report_errors_service_grpc_transport.ReportErrorsServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def report_error_event( - self, - project_name, - event, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Report an individual error event. 
- - Example: - >>> from google.cloud import errorreporting_v1beta1 - >>> - >>> client = errorreporting_v1beta1.ReportErrorsServiceClient() - >>> - >>> project_name = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `event`: - >>> event = {} - >>> - >>> response = client.report_error_event(project_name, event) - - Args: - project_name (str): [Required] The resource name of the Google Cloud Platform project. - Written as ``projects/`` plus the `Google Cloud Platform project - ID `__. Example: - ``projects/my-project-123``. - event (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ReportedErrorEvent]): [Required] The error event to be reported. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.errorreporting_v1beta1.types.ReportedErrorEvent` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "report_error_event" not in self._inner_api_calls: - self._inner_api_calls[ - "report_error_event" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.report_error_event, - default_retry=self._method_configs["ReportErrorEvent"].retry, - default_timeout=self._method_configs["ReportErrorEvent"].timeout, - client_info=self._client_info, - ) - - request = report_errors_service_pb2.ReportErrorEventRequest( - project_name=project_name, event=event - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_name", project_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["report_error_event"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client_config.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client_config.py deleted file mode 100644 index cfefc7eb95f2..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/report_errors_service_client_config.py +++ /dev/null @@ -1,28 +0,0 @@ -config = { - "interfaces": { - "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ReportErrorEvent": { - 
"timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - } - }, - } - } -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/__init__.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_group_service_grpc_transport.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_group_service_grpc_transport.py deleted file mode 100644 index cde299b8edb0..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_group_service_grpc_transport.py +++ /dev/null @@ -1,143 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2_grpc - - -class ErrorGroupServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.devtools.clouderrorreporting.v1beta1 ErrorGroupService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, - channel=None, - credentials=None, - address="clouderrorreporting.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "error_group_service_stub": error_group_service_pb2_grpc.ErrorGroupServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, - address="clouderrorreporting.googleapis.com:443", - credentials=None, - **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def get_group(self): - """Return the gRPC stub for :meth:`ErrorGroupServiceClient.get_group`. - - Get the specified group. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["error_group_service_stub"].GetGroup - - @property - def update_group(self): - """Return the gRPC stub for :meth:`ErrorGroupServiceClient.update_group`. - - Replace the data for the specified group. - Fails if the group does not exist. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["error_group_service_stub"].UpdateGroup diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_stats_service_grpc_transport.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_stats_service_grpc_transport.py deleted file mode 100644 index 2ef5f2126019..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/error_stats_service_grpc_transport.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2_grpc - - -class ErrorStatsServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.devtools.clouderrorreporting.v1beta1 ErrorStatsService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. 
- _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, - channel=None, - credentials=None, - address="clouderrorreporting.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "error_stats_service_stub": error_stats_service_pb2_grpc.ErrorStatsServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, - address="clouderrorreporting.googleapis.com:443", - credentials=None, - **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_group_stats(self): - """Return the gRPC stub for :meth:`ErrorStatsServiceClient.list_group_stats`. - - Lists the specified groups. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["error_stats_service_stub"].ListGroupStats - - @property - def list_events(self): - """Return the gRPC stub for :meth:`ErrorStatsServiceClient.list_events`. - - Lists the specified events. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["error_stats_service_stub"].ListEvents - - @property - def delete_events(self): - """Return the gRPC stub for :meth:`ErrorStatsServiceClient.delete_events`. - - Deletes all error events of a given project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["error_stats_service_stub"].DeleteEvents diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/report_errors_service_grpc_transport.py b/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/report_errors_service_grpc_transport.py deleted file mode 100644 index e367e49c4c39..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/gapic/transports/report_errors_service_grpc_transport.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2_grpc - - -class ReportErrorsServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.devtools.clouderrorreporting.v1beta1 ReportErrorsService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, - channel=None, - credentials=None, - address="clouderrorreporting.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "report_errors_service_stub": report_errors_service_pb2_grpc.ReportErrorsServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, - address="clouderrorreporting.googleapis.com:443", - credentials=None, - **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def report_error_event(self): - """Return the gRPC stub for :meth:`ReportErrorsServiceClient.report_error_event`. - - Report an individual error event. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["report_errors_service_stub"].ReportErrorEvent diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/__init__.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common.proto b/error_reporting/google/cloud/errorreporting_v1beta1/proto/common.proto deleted file mode 100644 index 132f1a646f87..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common.proto +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.devtools.clouderrorreporting.v1beta1; - -import "google/api/annotations.proto"; -import "google/api/monitored_resource.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.ErrorReporting.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting"; -option java_multiple_files = true; -option java_outer_classname = "CommonProto"; -option java_package = "com.google.devtools.clouderrorreporting.v1beta1"; -option php_namespace = "Google\\Cloud\\ErrorReporting\\V1beta1"; - -// Description of a group of similar error events. -message ErrorGroup { - // The group resource name. - // Example: projects/my-project-123/groups/my-groupid - string name = 1; - - // Group IDs are unique for a given project. If the same kind of error - // occurs in different service contexts, it will receive the same group ID. - string group_id = 2; - - // Associated tracking issues. - repeated TrackingIssue tracking_issues = 3; -} - -// Information related to tracking the progress on resolving the error. -message TrackingIssue { - // A URL pointing to a related entry in an issue tracking system. - // Example: https://github.com/user/project/issues/4 - string url = 1; -} - -// An error event which is returned by the Error Reporting system. 
-message ErrorEvent { - // Time when the event occurred as provided in the error report. - // If the report did not contain a timestamp, the time the error was received - // by the Error Reporting system is used. - google.protobuf.Timestamp event_time = 1; - - // The `ServiceContext` for which this error was reported. - ServiceContext service_context = 2; - - // The stack trace that was reported or logged by the service. - string message = 3; - - // Data about the context in which the error occurred. - ErrorContext context = 5; -} - -// Describes a running service that sends errors. -// Its version changes over time and multiple versions can run in parallel. -message ServiceContext { - // An identifier of the service, such as the name of the - // executable, job, or Google App Engine service name. This field is expected - // to have a low number of values that are relatively stable over time, as - // opposed to `version`, which can be changed whenever new code is deployed. - // - // Contains the service name for error reports extracted from Google - // App Engine logs or `default` if the App Engine default service is used. - string service = 2; - - // Represents the source code version that the developer provided, - // which could represent a version label or a Git SHA-1 hash, for example. - string version = 3; - - // Type of the MonitoredResource. List of possible values: - // https://cloud.google.com/monitoring/api/resources - // - // Value is set automatically for incoming errors and must not be set when - // reporting errors. - string resource_type = 4; -} - -// A description of the context in which an error occurred. -// This data should be provided by the application when reporting an error, -// unless the -// error report has been generated automatically from Google App Engine logs. -message ErrorContext { - // The HTTP request which was processed when the error was - // triggered. - HttpRequestContext http_request = 1; - - // The user who caused or was affected by the crash. - // This can be a user ID, an email address, or an arbitrary token that - // uniquely identifies the user. - // When sending an error report, leave this field empty if the user was not - // logged in. In this case the - // Error Reporting system will use other data, such as remote IP address, to - // distinguish affected users. See `affected_users_count` in - // `ErrorGroupStats`. - string user = 2; - - // The location in the source code where the decision was made to - // report the error, usually the place where it was logged. - // For a logged exception this would be the source line where the - // exception is logged, usually close to the place where it was - // caught. This value is in contrast to `Exception.cause_location`, - // which describes the source line where the exception was thrown. - SourceLocation report_location = 3; -} - -// HTTP request data that is related to a reported error. -// This data should be provided by the application when reporting an error, -// unless the -// error report has been generated automatically from Google App Engine logs. -message HttpRequestContext { - // The type of HTTP request, such as `GET`, `POST`, etc. - string method = 1; - - // The URL of the request. - string url = 2; - - // The user agent information that is provided with the request. - string user_agent = 3; - - // The referrer information that is provided with the request. - string referrer = 4; - - // The HTTP response status code for the request. 
- int32 response_status_code = 5; - - // The IP address from which the request originated. - // This can be IPv4, IPv6, or a token which is derived from the - // IP address, depending on the data that has been provided - // in the error report. - string remote_ip = 6; -} - -// Indicates a location in the source code of the service for which -// errors are reported. -// This data should be provided by the application when reporting an error, -// unless the error report has been generated automatically from Google App -// Engine logs. All fields are optional. -message SourceLocation { - // The source code filename, which can include a truncated relative - // path, or a full path from a production machine. - string file_path = 1; - - // 1-based. 0 indicates that the line number is unknown. - int32 line_number = 2; - - // Human-readable name of a function or method. - // The value can include optional context like the class or package name. - // For example, `my.package.MyClass.method` in case of Java. - string function_name = 4; -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2.py deleted file mode 100644 index c9ebc4c6d279..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2.py +++ /dev/null @@ -1,825 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/devtools/clouderrorreporting_v1beta1/proto/common.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/devtools/clouderrorreporting_v1beta1/proto/common.proto", - package="google.devtools.clouderrorreporting.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n/com.google.devtools.clouderrorreporting.v1beta1B\013CommonProtoP\001Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\252\002#Google.Cloud.ErrorReporting.V1Beta1\312\002#Google\\Cloud\\ErrorReporting\\V1beta1" - ), - serialized_pb=_b( - '\n>google/devtools/clouderrorreporting_v1beta1/proto/common.proto\x12+google.devtools.clouderrorreporting.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x81\x01\n\nErrorGroup\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12S\n\x0ftracking_issues\x18\x03 \x03(\x0b\x32:.google.devtools.clouderrorreporting.v1beta1.TrackingIssue"\x1c\n\rTrackingIssue\x12\x0b\n\x03url\x18\x01 \x01(\t"\xef\x01\n\nErrorEvent\x12.\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12T\n\x0fservice_context\x18\x02 \x01(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.ServiceContext\x12\x0f\n\x07message\x18\x03 \x01(\t\x12J\n\x07\x63ontext\x18\x05 
\x01(\x0b\x32\x39.google.devtools.clouderrorreporting.v1beta1.ErrorContext"I\n\x0eServiceContext\x12\x0f\n\x07service\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t"\xc9\x01\n\x0c\x45rrorContext\x12U\n\x0chttp_request\x18\x01 \x01(\x0b\x32?.google.devtools.clouderrorreporting.v1beta1.HttpRequestContext\x12\x0c\n\x04user\x18\x02 \x01(\t\x12T\n\x0freport_location\x18\x03 \x01(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.SourceLocation"\x88\x01\n\x12HttpRequestContext\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x12\n\nuser_agent\x18\x03 \x01(\t\x12\x10\n\x08referrer\x18\x04 \x01(\t\x12\x1c\n\x14response_status_code\x18\x05 \x01(\x05\x12\x11\n\tremote_ip\x18\x06 \x01(\t"O\n\x0eSourceLocation\x12\x11\n\tfile_path\x18\x01 \x01(\t\x12\x13\n\x0bline_number\x18\x02 \x01(\x05\x12\x15\n\rfunction_name\x18\x04 \x01(\tB\xec\x01\n/com.google.devtools.clouderrorreporting.v1beta1B\x0b\x43ommonProtoP\x01Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\xaa\x02#Google.Cloud.ErrorReporting.V1Beta1\xca\x02#Google\\Cloud\\ErrorReporting\\V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_ERRORGROUP = _descriptor.Descriptor( - name="ErrorGroup", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroup", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroup.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group_id", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroup.group_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="tracking_issues", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroup.tracking_issues", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=212, - serialized_end=341, -) - - -_TRACKINGISSUE = _descriptor.Descriptor( - name="TrackingIssue", - full_name="google.devtools.clouderrorreporting.v1beta1.TrackingIssue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="google.devtools.clouderrorreporting.v1beta1.TrackingIssue.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=343, - serialized_end=371, -) - - -_ERROREVENT = _descriptor.Descriptor( - name="ErrorEvent", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorEvent", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="event_time", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorEvent.event_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_context", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorEvent.service_context", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorEvent.message", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorEvent.context", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=374, - serialized_end=613, -) - - -_SERVICECONTEXT = _descriptor.Descriptor( - name="ServiceContext", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContext.service", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContext.version", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_type", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContext.resource_type", - index=2, - 
number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=615, - serialized_end=688, -) - - -_ERRORCONTEXT = _descriptor.Descriptor( - name="ErrorContext", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorContext.http_request", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorContext.user", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="report_location", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorContext.report_location", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=691, - serialized_end=892, -) - - -_HTTPREQUESTCONTEXT = _descriptor.Descriptor( - name="HttpRequestContext", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="method", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.method", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="url", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.url", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_agent", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.user_agent", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="referrer", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.referrer", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="response_status_code", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.response_status_code", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="remote_ip", - full_name="google.devtools.clouderrorreporting.v1beta1.HttpRequestContext.remote_ip", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=895, - serialized_end=1031, -) - - -_SOURCELOCATION = _descriptor.Descriptor( - name="SourceLocation", - full_name="google.devtools.clouderrorreporting.v1beta1.SourceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file_path", - full_name="google.devtools.clouderrorreporting.v1beta1.SourceLocation.file_path", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="line_number", - full_name="google.devtools.clouderrorreporting.v1beta1.SourceLocation.line_number", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="function_name", - full_name="google.devtools.clouderrorreporting.v1beta1.SourceLocation.function_name", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1033, - serialized_end=1112, -) - -_ERRORGROUP.fields_by_name["tracking_issues"].message_type = _TRACKINGISSUE -_ERROREVENT.fields_by_name[ - "event_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ERROREVENT.fields_by_name["service_context"].message_type = _SERVICECONTEXT 
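For reviewers, a quick picture of what the generated module being deleted here provided: each proto message becomes a Python class whose submessage fields nest exactly the way these descriptor-linking statements wire them together. A minimal sketch, not part of the patch, using only the import path, class names, and field names declared in the deleted common_pb2.py:

```python
# Illustrative sketch only (not part of the patch): constructing the nested
# messages that the deleted common_pb2.py generated. Import path and field
# names are taken verbatim from the deleted module.
from google.cloud.errorreporting_v1beta1.proto import common_pb2

event = common_pb2.ErrorEvent(
    message="Traceback (most recent call last): ...",  # reported stack trace
    service_context=common_pb2.ServiceContext(service="web", version="1.2.3"),
    context=common_pb2.ErrorContext(
        http_request=common_pb2.HttpRequestContext(
            method="GET", url="https://example.com/", response_status_code=500
        ),
        report_location=common_pb2.SourceLocation(
            file_path="app/views.py", line_number=42, function_name="render"
        ),
    ),
)
event.event_time.GetCurrentTime()  # Timestamp submessage, populated in place
```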
-_ERROREVENT.fields_by_name["context"].message_type = _ERRORCONTEXT -_ERRORCONTEXT.fields_by_name["http_request"].message_type = _HTTPREQUESTCONTEXT -_ERRORCONTEXT.fields_by_name["report_location"].message_type = _SOURCELOCATION -DESCRIPTOR.message_types_by_name["ErrorGroup"] = _ERRORGROUP -DESCRIPTOR.message_types_by_name["TrackingIssue"] = _TRACKINGISSUE -DESCRIPTOR.message_types_by_name["ErrorEvent"] = _ERROREVENT -DESCRIPTOR.message_types_by_name["ServiceContext"] = _SERVICECONTEXT -DESCRIPTOR.message_types_by_name["ErrorContext"] = _ERRORCONTEXT -DESCRIPTOR.message_types_by_name["HttpRequestContext"] = _HTTPREQUESTCONTEXT -DESCRIPTOR.message_types_by_name["SourceLocation"] = _SOURCELOCATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ErrorGroup = _reflection.GeneratedProtocolMessageType( - "ErrorGroup", - (_message.Message,), - dict( - DESCRIPTOR=_ERRORGROUP, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""Description of a group of similar error events. - - - Attributes: - name: - The group resource name. Example: projects/my- - project-123/groups/my-groupid - group_id: - Group IDs are unique for a given project. If the same kind of - error occurs in different service contexts, it will receive - the same group ID. - tracking_issues: - Associated tracking issues. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ErrorGroup) - ), -) -_sym_db.RegisterMessage(ErrorGroup) - -TrackingIssue = _reflection.GeneratedProtocolMessageType( - "TrackingIssue", - (_message.Message,), - dict( - DESCRIPTOR=_TRACKINGISSUE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""Information related to tracking the progress on resolving the error. - - - Attributes: - url: - A URL pointing to a related entry in an issue tracking system. - Example: https://github.com/user/project/issues/4 - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.TrackingIssue) - ), -) -_sym_db.RegisterMessage(TrackingIssue) - -ErrorEvent = _reflection.GeneratedProtocolMessageType( - "ErrorEvent", - (_message.Message,), - dict( - DESCRIPTOR=_ERROREVENT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""An error event which is returned by the Error Reporting system. - - - Attributes: - event_time: - Time when the event occurred as provided in the error report. - If the report did not contain a timestamp, the time the error - was received by the Error Reporting system is used. - service_context: - The ``ServiceContext`` for which this error was reported. - message: - The stack trace that was reported or logged by the service. - context: - Data about the context in which the error occurred. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ErrorEvent) - ), -) -_sym_db.RegisterMessage(ErrorEvent) - -ServiceContext = _reflection.GeneratedProtocolMessageType( - "ServiceContext", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICECONTEXT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""Describes a running service that sends errors. Its version changes over - time and multiple versions can run in parallel. - - - Attributes: - service: - An identifier of the service, such as the name of the - executable, job, or Google App Engine service name. 
This field - is expected to have a low number of values that are relatively - stable over time, as opposed to ``version``, which can be - changed whenever new code is deployed. Contains the service - name for error reports extracted from Google App Engine logs - or ``default`` if the App Engine default service is used. - version: - Represents the source code version that the developer - provided, which could represent a version label or a Git SHA-1 - hash, for example. - resource_type: - Type of the MonitoredResource. List of possible values: - https://cloud.google.com/monitoring/api/resources Value is - set automatically for incoming errors and must not be set when - reporting errors. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ServiceContext) - ), -) -_sym_db.RegisterMessage(ServiceContext) - -ErrorContext = _reflection.GeneratedProtocolMessageType( - "ErrorContext", - (_message.Message,), - dict( - DESCRIPTOR=_ERRORCONTEXT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""A description of the context in which an error occurred. This data - should be provided by the application when reporting an error, unless - the error report has been generated automatically from Google App Engine - logs. - - - Attributes: - http_request: - The HTTP request which was processed when the error was - triggered. - user: - The user who caused or was affected by the crash. This can be - a user ID, an email address, or an arbitrary token that - uniquely identifies the user. When sending an error report, - leave this field empty if the user was not logged in. In this - case the Error Reporting system will use other data, such as - remote IP address, to distinguish affected users. See - ``affected_users_count`` in ``ErrorGroupStats``. - report_location: - The location in the source code where the decision was made to - report the error, usually the place where it was logged. For a - logged exception this would be the source line where the - exception is logged, usually close to the place where it was - caught. This value is in contrast to - ``Exception.cause_location``, which describes the source line - where the exception was thrown. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ErrorContext) - ), -) -_sym_db.RegisterMessage(ErrorContext) - -HttpRequestContext = _reflection.GeneratedProtocolMessageType( - "HttpRequestContext", - (_message.Message,), - dict( - DESCRIPTOR=_HTTPREQUESTCONTEXT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""HTTP request data that is related to a reported error. This data should - be provided by the application when reporting an error, unless the error - report has been generated automatically from Google App Engine logs. - - - Attributes: - method: - The type of HTTP request, such as ``GET``, ``POST``, etc. - url: - The URL of the request. - user_agent: - The user agent information that is provided with the request. - referrer: - The referrer information that is provided with the request. - response_status_code: - The HTTP response status code for the request. - remote_ip: - The IP address from which the request originated. This can be - IPv4, IPv6, or a token which is derived from the IP address, - depending on the data that has been provided in the error - report. 
- """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.HttpRequestContext) - ), -) -_sym_db.RegisterMessage(HttpRequestContext) - -SourceLocation = _reflection.GeneratedProtocolMessageType( - "SourceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_SOURCELOCATION, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.common_pb2", - __doc__="""Indicates a location in the source code of the service for which errors - are reported. This data should be provided by the application when - reporting an error, unless the error report has been generated - automatically from Google App Engine logs. All fields are optional. - - - Attributes: - file_path: - The source code filename, which can include a truncated - relative path, or a full path from a production machine. - line_number: - 1-based. 0 indicates that the line number is unknown. - function_name: - Human-readable name of a function or method. The value can - include optional context like the class or package name. For - example, ``my.package.MyClass.method`` in case of Java. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.SourceLocation) - ), -) -_sym_db.RegisterMessage(SourceLocation) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2_grpc.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/common_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service.proto b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service.proto deleted file mode 100644 index 15086a9eaa47..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service.proto +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.devtools.clouderrorreporting.v1beta1; - -import "google/api/annotations.proto"; -import "google/devtools/clouderrorreporting/v1beta1/common.proto"; - -option csharp_namespace = "Google.Cloud.ErrorReporting.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting"; -option java_multiple_files = true; -option java_outer_classname = "ErrorGroupServiceProto"; -option java_package = "com.google.devtools.clouderrorreporting.v1beta1"; -option php_namespace = "Google\\Cloud\\ErrorReporting\\V1beta1"; - -// Service for retrieving and updating individual error groups. -service ErrorGroupService { - // Get the specified group. 
- rpc GetGroup(GetGroupRequest) returns (ErrorGroup) { - option (google.api.http) = { - get: "/v1beta1/{group_name=projects/*/groups/*}" - }; - } - - // Replace the data for the specified group. - // Fails if the group does not exist. - rpc UpdateGroup(UpdateGroupRequest) returns (ErrorGroup) { - option (google.api.http) = { - put: "/v1beta1/{group.name=projects/*/groups/*}" - body: "group" - }; - } -} - -// A request to return an individual group. -message GetGroupRequest { - // [Required] The group resource name. Written as - // projects/projectID/groups/group_name. - // Call - // - // groupStats.list to return a list of groups belonging to - // this project. - // - // Example: projects/my-project-123/groups/my-group - string group_name = 1; -} - -// A request to replace the existing data for the given group. -message UpdateGroupRequest { - // [Required] The group which replaces the resource on the server. - ErrorGroup group = 1; -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2.py deleted file mode 100644 index a6406ab03623..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2.py +++ /dev/null @@ -1,207 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/devtools/clouderrorreporting_v1beta1/proto/error_group_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.errorreporting_v1beta1.proto import ( - common_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/devtools/clouderrorreporting_v1beta1/proto/error_group_service.proto", - package="google.devtools.clouderrorreporting.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n/com.google.devtools.clouderrorreporting.v1beta1B\026ErrorGroupServiceProtoP\001Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\252\002#Google.Cloud.ErrorReporting.V1Beta1\312\002#Google\\Cloud\\ErrorReporting\\V1beta1" - ), - serialized_pb=_b( - '\nKgoogle/devtools/clouderrorreporting_v1beta1/proto/error_group_service.proto\x12+google.devtools.clouderrorreporting.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a>google/devtools/clouderrorreporting_v1beta1/proto/common.proto"%\n\x0fGetGroupRequest\x12\x12\n\ngroup_name\x18\x01 \x01(\t"\\\n\x12UpdateGroupRequest\x12\x46\n\x05group\x18\x01 
\x01(\x0b\x32\x37.google.devtools.clouderrorreporting.v1beta1.ErrorGroup2\x8e\x03\n\x11\x45rrorGroupService\x12\xb4\x01\n\x08GetGroup\x12<.google.devtools.clouderrorreporting.v1beta1.GetGroupRequest\x1a\x37.google.devtools.clouderrorreporting.v1beta1.ErrorGroup"1\x82\xd3\xe4\x93\x02+\x12)/v1beta1/{group_name=projects/*/groups/*}\x12\xc1\x01\n\x0bUpdateGroup\x12?.google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest\x1a\x37.google.devtools.clouderrorreporting.v1beta1.ErrorGroup"8\x82\xd3\xe4\x93\x02\x32\x1a)/v1beta1/{group.name=projects/*/groups/*}:\x05groupB\xf7\x01\n/com.google.devtools.clouderrorreporting.v1beta1B\x16\x45rrorGroupServiceProtoP\x01Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\xaa\x02#Google.Cloud.ErrorReporting.V1Beta1\xca\x02#Google\\Cloud\\ErrorReporting\\V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, - ], -) - - -_GETGROUPREQUEST = _descriptor.Descriptor( - name="GetGroupRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.GetGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group_name", - full_name="google.devtools.clouderrorreporting.v1beta1.GetGroupRequest.group_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=218, - serialized_end=255, -) - - -_UPDATEGROUPREQUEST = _descriptor.Descriptor( - name="UpdateGroupRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group", - full_name="google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest.group", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=257, - serialized_end=349, -) - -_UPDATEGROUPREQUEST.fields_by_name[ - "group" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERRORGROUP -) -DESCRIPTOR.message_types_by_name["GetGroupRequest"] = _GETGROUPREQUEST -DESCRIPTOR.message_types_by_name["UpdateGroupRequest"] = _UPDATEGROUPREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -GetGroupRequest = _reflection.GeneratedProtocolMessageType( - "GetGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETGROUPREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_group_service_pb2", - __doc__="""A request to return an individual group. - - - Attributes: - group_name: - [Required] The group resource name. Written as - projects/projectID/groups/group\_name. 
Call groupStats.list to - return a list of groups belonging to this project. Example: - projects/my-project-123/groups/my-group - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.GetGroupRequest) - ), -) -_sym_db.RegisterMessage(GetGroupRequest) - -UpdateGroupRequest = _reflection.GeneratedProtocolMessageType( - "UpdateGroupRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEGROUPREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_group_service_pb2", - __doc__="""A request to replace the existing data for the given group. - - - Attributes: - group: - [Required] The group which replaces the resource on the - server. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest) - ), -) -_sym_db.RegisterMessage(UpdateGroupRequest) - - -DESCRIPTOR._options = None - -_ERRORGROUPSERVICE = _descriptor.ServiceDescriptor( - name="ErrorGroupService", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=352, - serialized_end=750, - methods=[ - _descriptor.MethodDescriptor( - name="GetGroup", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupService.GetGroup", - index=0, - containing_service=None, - input_type=_GETGROUPREQUEST, - output_type=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERRORGROUP, - serialized_options=_b( - "\202\323\344\223\002+\022)/v1beta1/{group_name=projects/*/groups/*}" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateGroup", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupService.UpdateGroup", - index=1, - containing_service=None, - input_type=_UPDATEGROUPREQUEST, - output_type=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERRORGROUP, - serialized_options=_b( - "\202\323\344\223\0022\032)/v1beta1/{group.name=projects/*/groups/*}:\005group" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_ERRORGROUPSERVICE) - -DESCRIPTOR.services_by_name["ErrorGroupService"] = _ERRORGROUPSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2_grpc.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2_grpc.py deleted file mode 100644 index 3849772ff67d..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_group_service_pb2_grpc.py +++ /dev/null @@ -1,71 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.errorreporting_v1beta1.proto import ( - common_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2, -) -from google.cloud.errorreporting_v1beta1.proto import ( - error_group_service_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__group__service__pb2, -) - - -class ErrorGroupServiceStub(object): - """Service for retrieving and updating individual error groups. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.GetGroup = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__group__service__pb2.GetGroupRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.ErrorGroup.FromString, - ) - self.UpdateGroup = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__group__service__pb2.UpdateGroupRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.ErrorGroup.FromString, - ) - - -class ErrorGroupServiceServicer(object): - """Service for retrieving and updating individual error groups. - """ - - def GetGroup(self, request, context): - """Get the specified group. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateGroup(self, request, context): - """Replace the data for the specified group. - Fails if the group does not exist. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ErrorGroupServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "GetGroup": grpc.unary_unary_rpc_method_handler( - servicer.GetGroup, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__group__service__pb2.GetGroupRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.ErrorGroup.SerializeToString, - ), - "UpdateGroup": grpc.unary_unary_rpc_method_handler( - servicer.UpdateGroup, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__group__service__pb2.UpdateGroupRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.ErrorGroup.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", - rpc_method_handlers, - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service.proto b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service.proto deleted file mode 100644 index ffb25b2b8ac2..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service.proto +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.devtools.clouderrorreporting.v1beta1; - -import "google/api/annotations.proto"; -import "google/devtools/clouderrorreporting/v1beta1/common.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.ErrorReporting.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting"; -option java_multiple_files = true; -option java_outer_classname = "ErrorStatsServiceProto"; -option java_package = "com.google.devtools.clouderrorreporting.v1beta1"; -option php_namespace = "Google\\Cloud\\ErrorReporting\\V1beta1"; - -// An API for retrieving and managing error statistics as well as data for -// individual events. -service ErrorStatsService { - // Lists the specified groups. - rpc ListGroupStats(ListGroupStatsRequest) returns (ListGroupStatsResponse) { - option (google.api.http) = { - get: "/v1beta1/{project_name=projects/*}/groupStats" - }; - } - - // Lists the specified events. - rpc ListEvents(ListEventsRequest) returns (ListEventsResponse) { - option (google.api.http) = { - get: "/v1beta1/{project_name=projects/*}/events" - }; - } - - // Deletes all error events of a given project. - rpc DeleteEvents(DeleteEventsRequest) returns (DeleteEventsResponse) { - option (google.api.http) = { - delete: "/v1beta1/{project_name=projects/*}/events" - }; - } -} - -// Specifies a set of `ErrorGroupStats` to return. -message ListGroupStatsRequest { - // [Required] The resource name of the Google Cloud Platform project. Written - // as projects/ plus the - // Google Cloud - // Platform project ID. - // - // Example: projects/my-project-123. - string project_name = 1; - - // [Optional] List all ErrorGroupStats with these IDs. - repeated string group_id = 2; - - // [Optional] List only ErrorGroupStats which belong to a service - // context that matches the filter. - // Data for all service contexts is returned if this field is not specified. - ServiceContextFilter service_filter = 3; - - // [Optional] List data for the given time range. - // If not set a default time range is used. The field time_range_begin - // in the response will specify the beginning of this time range. - // Only ErrorGroupStats with a non-zero count in the given time - // range are returned, unless the request contains an explicit group_id list. - // If a group_id list is given, also ErrorGroupStats with zero - // occurrences are returned. - QueryTimeRange time_range = 5; - - // [Optional] The preferred duration for a single returned `TimedCount`. - // If not set, no timed counts are returned. - google.protobuf.Duration timed_count_duration = 6; - - // [Optional] The alignment of the timed counts to be returned. - // Default is `ALIGNMENT_EQUAL_AT_END`. - TimedCountAlignment alignment = 7; - - // [Optional] Time where the timed counts shall be aligned if rounded - // alignment is chosen. Default is 00:00 UTC. - google.protobuf.Timestamp alignment_time = 8; - - // [Optional] The sort order in which the results are returned. - // Default is `COUNT_DESC`. - ErrorGroupOrder order = 9; - - // [Optional] The maximum number of results to return per response. - // Default is 20. - int32 page_size = 11; - - // [Optional] A `next_page_token` provided by a previous response. To view - // additional results, pass this token along with the identical query - // parameters as the first request. - string page_token = 12; -} - -// Contains a set of requested error group stats. 
-message ListGroupStatsResponse { - // The error group stats which match the given request. - repeated ErrorGroupStats error_group_stats = 1; - - // If non-empty, more results are available. - // Pass this token, along with the same query parameters as the first - // request, to view the next page of results. - string next_page_token = 2; - - // The timestamp specifies the start time to which the request was restricted. - // The start time is set based on the requested time range. It may be adjusted - // to a later time if a project has exceeded the storage quota and older data - // has been deleted. - google.protobuf.Timestamp time_range_begin = 4; -} - -// Data extracted for a specific group based on certain filter criteria, -// such as a given time period and/or service filter. -message ErrorGroupStats { - // Group data that is independent of the filter criteria. - ErrorGroup group = 1; - - // Approximate total number of events in the given group that match - // the filter criteria. - int64 count = 2; - - // Approximate number of affected users in the given group that - // match the filter criteria. - // Users are distinguished by data in the `ErrorContext` of the - // individual error events, such as their login name or their remote - // IP address in case of HTTP requests. - // The number of affected users can be zero even if the number of - // errors is non-zero if no data was provided from which the - // affected user could be deduced. - // Users are counted based on data in the request - // context that was provided in the error report. If more users are - // implicitly affected, such as due to a crash of the whole service, - // this is not reflected here. - int64 affected_users_count = 3; - - // Approximate number of occurrences over time. - // Timed counts returned by ListGroups are guaranteed to be: - // - // - Inside the requested time interval - // - Non-overlapping, and - // - Ordered by ascending time. - repeated TimedCount timed_counts = 4; - - // Approximate first occurrence that was ever seen for this group - // and which matches the given filter criteria, ignoring the - // time_range that was specified in the request. - google.protobuf.Timestamp first_seen_time = 5; - - // Approximate last occurrence that was ever seen for this group and - // which matches the given filter criteria, ignoring the time_range - // that was specified in the request. - google.protobuf.Timestamp last_seen_time = 6; - - // Service contexts with a non-zero error count for the given filter - // criteria. This list can be truncated if multiple services are affected. - // Refer to `num_affected_services` for the total count. - repeated ServiceContext affected_services = 7; - - // The total number of services with a non-zero error count for the given - // filter criteria. - int32 num_affected_services = 8; - - // An arbitrary event that is chosen as representative for the whole group. - // The representative event is intended to be used as a quick preview for - // the whole group. Events in the group are usually sufficiently similar - // to each other such that showing an arbitrary representative provides - // insight into the characteristics of the group as a whole. - ErrorEvent representative = 9; -} - -// The number of errors in a given time period. -// All numbers are approximate since the error events are sampled -// before counting them. -message TimedCount { - // Approximate number of occurrences in the given time period. 
- int64 count = 1; - - // Start of the time period to which `count` refers (included). - google.protobuf.Timestamp start_time = 2; - - // End of the time period to which `count` refers (excluded). - google.protobuf.Timestamp end_time = 3; -} - -// Specifies a set of error events to return. -message ListEventsRequest { - // [Required] The resource name of the Google Cloud Platform project. Written - // as `projects/` plus the - // [Google Cloud Platform project - // ID](https://support.google.com/cloud/answer/6158840). - // Example: `projects/my-project-123`. - string project_name = 1; - - // [Required] The group for which events shall be returned. - string group_id = 2; - - // [Optional] List only ErrorGroups which belong to a service context that - // matches the filter. - // Data for all service contexts is returned if this field is not specified. - ServiceContextFilter service_filter = 3; - - // [Optional] List only data for the given time range. - // If not set a default time range is used. The field time_range_begin - // in the response will specify the beginning of this time range. - QueryTimeRange time_range = 4; - - // [Optional] The maximum number of results to return per response. - int32 page_size = 6; - - // [Optional] A `next_page_token` provided by a previous response. - string page_token = 7; -} - -// Contains a set of requested error events. -message ListEventsResponse { - // The error events which match the given request. - repeated ErrorEvent error_events = 1; - - // If non-empty, more results are available. - // Pass this token, along with the same query parameters as the first - // request, to view the next page of results. - string next_page_token = 2; - - // The timestamp specifies the start time to which the request was restricted. - google.protobuf.Timestamp time_range_begin = 4; -} - -// Requests might be rejected or the resulting timed count durations might be -// adjusted for lower durations. -message QueryTimeRange { - // The supported time ranges. - enum Period { - // Do not use. - PERIOD_UNSPECIFIED = 0; - - // Retrieve data for the last hour. - // Recommended minimum timed count duration: 1 min. - PERIOD_1_HOUR = 1; - - // Retrieve data for the last 6 hours. - // Recommended minimum timed count duration: 10 min. - PERIOD_6_HOURS = 2; - - // Retrieve data for the last day. - // Recommended minimum timed count duration: 1 hour. - PERIOD_1_DAY = 3; - - // Retrieve data for the last week. - // Recommended minimum timed count duration: 6 hours. - PERIOD_1_WEEK = 4; - - // Retrieve data for the last 30 days. - // Recommended minimum timed count duration: 1 day. - PERIOD_30_DAYS = 5; - } - - // Restricts the query to the specified time range. - Period period = 1; -} - -// Specifies criteria for filtering a subset of service contexts. -// The fields in the filter correspond to the fields in `ServiceContext`. -// Only exact, case-sensitive matches are supported. -// If a field is unset or empty, it matches arbitrary values. -message ServiceContextFilter { - // [Optional] The exact value to match against - // [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). - string service = 2; - - // [Optional] The exact value to match against - // [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). - string version = 3; - - // [Optional] The exact value to match against - // [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). 
- string resource_type = 4; -} - -// Deletes all events in the project. -message DeleteEventsRequest { - // [Required] The resource name of the Google Cloud Platform project. Written - // as `projects/` plus the - // [Google Cloud Platform project - // ID](https://support.google.com/cloud/answer/6158840). - // Example: `projects/my-project-123`. - string project_name = 1; -} - -// Response message for deleting error events. -message DeleteEventsResponse {} - -// Specifies how the time periods of error group counts are aligned. -enum TimedCountAlignment { - // No alignment specified. - ERROR_COUNT_ALIGNMENT_UNSPECIFIED = 0; - - // The time periods shall be consecutive, have width equal to the - // requested duration, and be aligned at the `alignment_time` provided in - // the request. - // The `alignment_time` does not have to be inside the query period but - // even if it is outside, only time periods are returned which overlap - // with the query period. - // A rounded alignment will typically result in a - // different size of the first or the last time period. - ALIGNMENT_EQUAL_ROUNDED = 1; - - // The time periods shall be consecutive, have width equal to the - // requested duration, and be aligned at the end of the requested time - // period. This can result in a different size of the - // first time period. - ALIGNMENT_EQUAL_AT_END = 2; -} - -// A sorting order of error groups. -enum ErrorGroupOrder { - // No group order specified. - GROUP_ORDER_UNSPECIFIED = 0; - - // Total count of errors in the given time window in descending order. - COUNT_DESC = 1; - - // Timestamp when the group was last seen in the given time window - // in descending order. - LAST_SEEN_DESC = 2; - - // Timestamp when the group was created in descending order. - CREATED_DESC = 3; - - // Number of affected users in the given time window in descending order. - AFFECTED_USERS_DESC = 4; -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2.py deleted file mode 100644 index a8fe60bd67f7..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2.py +++ /dev/null @@ -1,1513 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
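The stats surface removed above is the pageable one, and its proto comments spell out the contract: pass `next_page_token` back with the identical query parameters. A minimal paging sketch; `ErrorStatsServiceStub` is assumed to follow the same generated pattern as the group-service stub shown earlier (its _pb2_grpc module is part of this same removal), and `channel` is built as in the previous sketch. Request and response fields come from the deleted error_stats_service.proto:

```python
# Illustrative sketch only (not part of the patch). ErrorStatsServiceStub is
# assumed to match the generated pattern of ErrorGroupServiceStub above;
# field names come from the deleted error_stats_service.proto.
from google.cloud.errorreporting_v1beta1.proto import (
    error_stats_service_pb2,
    error_stats_service_pb2_grpc,
)

stub = error_stats_service_pb2_grpc.ErrorStatsServiceStub(channel)

request = error_stats_service_pb2.ListGroupStatsRequest(
    project_name="projects/my-project-123",
    time_range=error_stats_service_pb2.QueryTimeRange(
        period=error_stats_service_pb2.QueryTimeRange.PERIOD_1_DAY
    ),
    order=error_stats_service_pb2.COUNT_DESC,  # the documented default, explicit
    page_size=20,  # the documented default page size
)
while True:
    response = stub.ListGroupStats(request)
    for stats in response.error_group_stats:
        print(stats.group.group_id, stats.count, stats.affected_users_count)
    if not response.next_page_token:
        break
    # Per the proto comment: same query parameters, plus the returned token.
    request.page_token = response.next_page_token
```

`ListEvents` takes a `group_id` from one of these results plus the same paging fields, and `DeleteEvents(project_name)` wipes all events for a project, returning an empty `DeleteEventsResponse`.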
-# source: google/devtools/clouderrorreporting_v1beta1/proto/error_stats_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.errorreporting_v1beta1.proto import ( - common_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/devtools/clouderrorreporting_v1beta1/proto/error_stats_service.proto", - package="google.devtools.clouderrorreporting.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n/com.google.devtools.clouderrorreporting.v1beta1B\026ErrorStatsServiceProtoP\001Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\252\002#Google.Cloud.ErrorReporting.V1Beta1\312\002#Google\\Cloud\\ErrorReporting\\V1beta1" - ), - serialized_pb=_b( - '\nKgoogle/devtools/clouderrorreporting_v1beta1/proto/error_stats_service.proto\x12+google.devtools.clouderrorreporting.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a>google/devtools/clouderrorreporting_v1beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xa1\x04\n\x15ListGroupStatsRequest\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x03(\t\x12Y\n\x0eservice_filter\x18\x03 \x01(\x0b\x32\x41.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter\x12O\n\ntime_range\x18\x05 \x01(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.QueryTimeRange\x12\x37\n\x14timed_count_duration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12S\n\talignment\x18\x07 \x01(\x0e\x32@.google.devtools.clouderrorreporting.v1beta1.TimedCountAlignment\x12\x32\n\x0e\x61lignment_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12K\n\x05order\x18\t \x01(\x0e\x32<.google.devtools.clouderrorreporting.v1beta1.ErrorGroupOrder\x12\x11\n\tpage_size\x18\x0b \x01(\x05\x12\x12\n\npage_token\x18\x0c \x01(\t"\xc0\x01\n\x16ListGroupStatsResponse\x12W\n\x11\x65rror_group_stats\x18\x01 \x03(\x0b\x32<.google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x34\n\x10time_range_begin\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x86\x04\n\x0f\x45rrorGroupStats\x12\x46\n\x05group\x18\x01 \x01(\x0b\x32\x37.google.devtools.clouderrorreporting.v1beta1.ErrorGroup\x12\r\n\x05\x63ount\x18\x02 \x01(\x03\x12\x1c\n\x14\x61\x66\x66\x65\x63ted_users_count\x18\x03 \x01(\x03\x12M\n\x0ctimed_counts\x18\x04 \x03(\x0b\x32\x37.google.devtools.clouderrorreporting.v1beta1.TimedCount\x12\x33\n\x0f\x66irst_seen_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0elast_seen_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12V\n\x11\x61\x66\x66\x65\x63ted_services\x18\x07 \x03(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.ServiceContext\x12\x1d\n\x15num_affected_services\x18\x08 
\x01(\x05\x12O\n\x0erepresentative\x18\t \x01(\x0b\x32\x37.google.devtools.clouderrorreporting.v1beta1.ErrorEvent"y\n\nTimedCount\x12\r\n\x05\x63ount\x18\x01 \x01(\x03\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x8e\x02\n\x11ListEventsRequest\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12Y\n\x0eservice_filter\x18\x03 \x01(\x0b\x32\x41.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter\x12O\n\ntime_range\x18\x04 \x01(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.QueryTimeRange\x12\x11\n\tpage_size\x18\x06 \x01(\x05\x12\x12\n\npage_token\x18\x07 \x01(\t"\xb2\x01\n\x12ListEventsResponse\x12M\n\x0c\x65rror_events\x18\x01 \x03(\x0b\x32\x37.google.devtools.clouderrorreporting.v1beta1.ErrorEvent\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x34\n\x10time_range_begin\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xe7\x01\n\x0eQueryTimeRange\x12R\n\x06period\x18\x01 \x01(\x0e\x32\x42.google.devtools.clouderrorreporting.v1beta1.QueryTimeRange.Period"\x80\x01\n\x06Period\x12\x16\n\x12PERIOD_UNSPECIFIED\x10\x00\x12\x11\n\rPERIOD_1_HOUR\x10\x01\x12\x12\n\x0ePERIOD_6_HOURS\x10\x02\x12\x10\n\x0cPERIOD_1_DAY\x10\x03\x12\x11\n\rPERIOD_1_WEEK\x10\x04\x12\x12\n\x0ePERIOD_30_DAYS\x10\x05"O\n\x14ServiceContextFilter\x12\x0f\n\x07service\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t"+\n\x13\x44\x65leteEventsRequest\x12\x14\n\x0cproject_name\x18\x01 \x01(\t"\x16\n\x14\x44\x65leteEventsResponse*u\n\x13TimedCountAlignment\x12%\n!ERROR_COUNT_ALIGNMENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x41LIGNMENT_EQUAL_ROUNDED\x10\x01\x12\x1a\n\x16\x41LIGNMENT_EQUAL_AT_END\x10\x02*}\n\x0f\x45rrorGroupOrder\x12\x1b\n\x17GROUP_ORDER_UNSPECIFIED\x10\x00\x12\x0e\n\nCOUNT_DESC\x10\x01\x12\x12\n\x0eLAST_SEEN_DESC\x10\x02\x12\x10\n\x0c\x43REATED_DESC\x10\x03\x12\x17\n\x13\x41\x46\x46\x45\x43TED_USERS_DESC\x10\x04\x32\xf2\x04\n\x11\x45rrorStatsService\x12\xd0\x01\n\x0eListGroupStats\x12\x42.google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest\x1a\x43.google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{project_name=projects/*}/groupStats\x12\xc0\x01\n\nListEvents\x12>.google.devtools.clouderrorreporting.v1beta1.ListEventsRequest\x1a?.google.devtools.clouderrorreporting.v1beta1.ListEventsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1beta1/{project_name=projects/*}/events\x12\xc6\x01\n\x0c\x44\x65leteEvents\x12@.google.devtools.clouderrorreporting.v1beta1.DeleteEventsRequest\x1a\x41.google.devtools.clouderrorreporting.v1beta1.DeleteEventsResponse"1\x82\xd3\xe4\x93\x02+*)/v1beta1/{project_name=projects/*}/eventsB\xf7\x01\n/com.google.devtools.clouderrorreporting.v1beta1B\x16\x45rrorStatsServiceProtoP\x01Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\xaa\x02#Google.Cloud.ErrorReporting.V1Beta1\xca\x02#Google\\Cloud\\ErrorReporting\\V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_TIMEDCOUNTALIGNMENT = _descriptor.EnumDescriptor( - name="TimedCountAlignment", - full_name="google.devtools.clouderrorreporting.v1beta1.TimedCountAlignment", - filename=None, - file=DESCRIPTOR, - 
values=[ - _descriptor.EnumValueDescriptor( - name="ERROR_COUNT_ALIGNMENT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGNMENT_EQUAL_ROUNDED", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALIGNMENT_EQUAL_AT_END", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2508, - serialized_end=2625, -) -_sym_db.RegisterEnumDescriptor(_TIMEDCOUNTALIGNMENT) - -TimedCountAlignment = enum_type_wrapper.EnumTypeWrapper(_TIMEDCOUNTALIGNMENT) -_ERRORGROUPORDER = _descriptor.EnumDescriptor( - name="ErrorGroupOrder", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupOrder", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="GROUP_ORDER_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="COUNT_DESC", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LAST_SEEN_DESC", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CREATED_DESC", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AFFECTED_USERS_DESC", - index=4, - number=4, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2627, - serialized_end=2752, -) -_sym_db.RegisterEnumDescriptor(_ERRORGROUPORDER) - -ErrorGroupOrder = enum_type_wrapper.EnumTypeWrapper(_ERRORGROUPORDER) -ERROR_COUNT_ALIGNMENT_UNSPECIFIED = 0 -ALIGNMENT_EQUAL_ROUNDED = 1 -ALIGNMENT_EQUAL_AT_END = 2 -GROUP_ORDER_UNSPECIFIED = 0 -COUNT_DESC = 1 -LAST_SEEN_DESC = 2 -CREATED_DESC = 3 -AFFECTED_USERS_DESC = 4 - - -_QUERYTIMERANGE_PERIOD = _descriptor.EnumDescriptor( - name="Period", - full_name="google.devtools.clouderrorreporting.v1beta1.QueryTimeRange.Period", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="PERIOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PERIOD_1_HOUR", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PERIOD_6_HOURS", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PERIOD_1_DAY", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PERIOD_1_WEEK", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PERIOD_30_DAYS", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2228, - serialized_end=2356, -) -_sym_db.RegisterEnumDescriptor(_QUERYTIMERANGE_PERIOD) - - -_LISTGROUPSTATSREQUEST = _descriptor.Descriptor( - name="ListGroupStatsRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_name", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.project_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group_id", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_filter", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.service_filter", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_range", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.time_range", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timed_count_duration", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.timed_count_duration", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alignment", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.alignment", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alignment_time", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.alignment_time", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.order", - index=7, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.page_size", - index=8, - number=11, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - 
full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.page_token", - index=9, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=284, - serialized_end=829, -) - - -_LISTGROUPSTATSRESPONSE = _descriptor.Descriptor( - name="ListGroupStatsResponse", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="error_group_stats", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.error_group_stats", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_range_begin", - full_name="google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.time_range_begin", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=832, - serialized_end=1024, -) - - -_ERRORGROUPSTATS = _descriptor.Descriptor( - name="ErrorGroupStats", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="group", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.group", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="count", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_users_count", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.affected_users_count", - index=2, - number=3, - type=3, - cpp_type=2, - 
label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timed_counts", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.timed_counts", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="first_seen_time", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.first_seen_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last_seen_time", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.last_seen_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_services", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.affected_services", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_affected_services", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.num_affected_services", - index=7, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="representative", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats.representative", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1027, - serialized_end=1545, -) - - -_TIMEDCOUNT = _descriptor.Descriptor( - name="TimedCount", - full_name="google.devtools.clouderrorreporting.v1beta1.TimedCount", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="count", - full_name="google.devtools.clouderrorreporting.v1beta1.TimedCount.count", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="start_time", - full_name="google.devtools.clouderrorreporting.v1beta1.TimedCount.start_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.devtools.clouderrorreporting.v1beta1.TimedCount.end_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1547, - serialized_end=1668, -) - - -_LISTEVENTSREQUEST = _descriptor.Descriptor( - name="ListEventsRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_name", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.project_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group_id", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.group_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_filter", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.service_filter", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_range", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.time_range", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.page_size", - index=4, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsRequest.page_token", - index=5, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1671, - serialized_end=1941, -) - - -_LISTEVENTSRESPONSE = _descriptor.Descriptor( - name="ListEventsResponse", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="error_events", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsResponse.error_events", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_range_begin", - full_name="google.devtools.clouderrorreporting.v1beta1.ListEventsResponse.time_range_begin", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1944, - serialized_end=2122, -) - - -_QUERYTIMERANGE = _descriptor.Descriptor( - name="QueryTimeRange", - full_name="google.devtools.clouderrorreporting.v1beta1.QueryTimeRange", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="period", - full_name="google.devtools.clouderrorreporting.v1beta1.QueryTimeRange.period", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[_QUERYTIMERANGE_PERIOD], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2125, - serialized_end=2356, -) - - -_SERVICECONTEXTFILTER = _descriptor.Descriptor( - name="ServiceContextFilter", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.service", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.version", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_type", - full_name="google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.resource_type", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2358, - serialized_end=2437, -) - - -_DELETEEVENTSREQUEST = _descriptor.Descriptor( - name="DeleteEventsRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.DeleteEventsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_name", - full_name="google.devtools.clouderrorreporting.v1beta1.DeleteEventsRequest.project_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2439, - serialized_end=2482, -) - - -_DELETEEVENTSRESPONSE = _descriptor.Descriptor( - name="DeleteEventsResponse", - full_name="google.devtools.clouderrorreporting.v1beta1.DeleteEventsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2484, - serialized_end=2506, -) - -_LISTGROUPSTATSREQUEST.fields_by_name[ - "service_filter" -].message_type = _SERVICECONTEXTFILTER -_LISTGROUPSTATSREQUEST.fields_by_name["time_range"].message_type = _QUERYTIMERANGE -_LISTGROUPSTATSREQUEST.fields_by_name[ - "timed_count_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_LISTGROUPSTATSREQUEST.fields_by_name["alignment"].enum_type = _TIMEDCOUNTALIGNMENT -_LISTGROUPSTATSREQUEST.fields_by_name[ - "alignment_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTGROUPSTATSREQUEST.fields_by_name["order"].enum_type = _ERRORGROUPORDER -_LISTGROUPSTATSRESPONSE.fields_by_name[ - "error_group_stats" -].message_type = _ERRORGROUPSTATS -_LISTGROUPSTATSRESPONSE.fields_by_name[ - "time_range_begin" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ERRORGROUPSTATS.fields_by_name[ - "group" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERRORGROUP -) -_ERRORGROUPSTATS.fields_by_name["timed_counts"].message_type = _TIMEDCOUNT -_ERRORGROUPSTATS.fields_by_name[ - 
"first_seen_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ERRORGROUPSTATS.fields_by_name[ - "last_seen_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ERRORGROUPSTATS.fields_by_name[ - "affected_services" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._SERVICECONTEXT -) -_ERRORGROUPSTATS.fields_by_name[ - "representative" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERROREVENT -) -_TIMEDCOUNT.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMEDCOUNT.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTEVENTSREQUEST.fields_by_name["service_filter"].message_type = _SERVICECONTEXTFILTER -_LISTEVENTSREQUEST.fields_by_name["time_range"].message_type = _QUERYTIMERANGE -_LISTEVENTSRESPONSE.fields_by_name[ - "error_events" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERROREVENT -) -_LISTEVENTSRESPONSE.fields_by_name[ - "time_range_begin" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_QUERYTIMERANGE.fields_by_name["period"].enum_type = _QUERYTIMERANGE_PERIOD -_QUERYTIMERANGE_PERIOD.containing_type = _QUERYTIMERANGE -DESCRIPTOR.message_types_by_name["ListGroupStatsRequest"] = _LISTGROUPSTATSREQUEST -DESCRIPTOR.message_types_by_name["ListGroupStatsResponse"] = _LISTGROUPSTATSRESPONSE -DESCRIPTOR.message_types_by_name["ErrorGroupStats"] = _ERRORGROUPSTATS -DESCRIPTOR.message_types_by_name["TimedCount"] = _TIMEDCOUNT -DESCRIPTOR.message_types_by_name["ListEventsRequest"] = _LISTEVENTSREQUEST -DESCRIPTOR.message_types_by_name["ListEventsResponse"] = _LISTEVENTSRESPONSE -DESCRIPTOR.message_types_by_name["QueryTimeRange"] = _QUERYTIMERANGE -DESCRIPTOR.message_types_by_name["ServiceContextFilter"] = _SERVICECONTEXTFILTER -DESCRIPTOR.message_types_by_name["DeleteEventsRequest"] = _DELETEEVENTSREQUEST -DESCRIPTOR.message_types_by_name["DeleteEventsResponse"] = _DELETEEVENTSRESPONSE -DESCRIPTOR.enum_types_by_name["TimedCountAlignment"] = _TIMEDCOUNTALIGNMENT -DESCRIPTOR.enum_types_by_name["ErrorGroupOrder"] = _ERRORGROUPORDER -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListGroupStatsRequest = _reflection.GeneratedProtocolMessageType( - "ListGroupStatsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPSTATSREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Specifies a set of ``ErrorGroupStats`` to return. - - - Attributes: - project_name: - [Required] The resource name of the Google Cloud Platform - project. Written as projects/ plus the Google Cloud Platform - project ID. Example: projects/my-project-123. - group_id: - [Optional] List all ErrorGroupStats with these IDs. - service_filter: - [Optional] List only ErrorGroupStats which belong to a service - context that matches the filter. Data for all service contexts - is returned if this field is not specified. - time_range: - [Optional] List data for the given time range. If not set a - default time range is used. The field time\_range\_begin in - the response will specify the beginning of this time range. - Only ErrorGroupStats with a non-zero count in the given time - range are returned, unless the request contains an explicit - group\_id list. If a group\_id list is given, also - ErrorGroupStats with zero occurrences are returned. 
- timed_count_duration: - [Optional] The preferred duration for a single returned - ``TimedCount``. If not set, no timed counts are returned. - alignment: - [Optional] The alignment of the timed counts to be returned. - Default is ``ALIGNMENT_EQUAL_AT_END``. - alignment_time: - [Optional] Time where the timed counts shall be aligned if - rounded alignment is chosen. Default is 00:00 UTC. - order: - [Optional] The sort order in which the results are returned. - Default is ``COUNT_DESC``. - page_size: - [Optional] The maximum number of results to return per - response. Default is 20. - page_token: - [Optional] A ``next_page_token`` provided by a previous - response. To view additional results, pass this token along - with the identical query parameters as the first request. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest) - ), -) -_sym_db.RegisterMessage(ListGroupStatsRequest) - -ListGroupStatsResponse = _reflection.GeneratedProtocolMessageType( - "ListGroupStatsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTGROUPSTATSRESPONSE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Contains a set of requested error group stats. - - - Attributes: - error_group_stats: - The error group stats which match the given request. - next_page_token: - If non-empty, more results are available. Pass this token, - along with the same query parameters as the first request, to - view the next page of results. - time_range_begin: - The timestamp specifies the start time to which the request - was restricted. The start time is set based on the requested - time range. It may be adjusted to a later time if a project - has exceeded the storage quota and older data has been - deleted. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse) - ), -) -_sym_db.RegisterMessage(ListGroupStatsResponse) - -ErrorGroupStats = _reflection.GeneratedProtocolMessageType( - "ErrorGroupStats", - (_message.Message,), - dict( - DESCRIPTOR=_ERRORGROUPSTATS, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Data extracted for a specific group based on certain filter criteria, - such as a given time period and/or service filter. - - - Attributes: - group: - Group data that is independent of the filter criteria. - count: - Approximate total number of events in the given group that - match the filter criteria. - affected_users_count: - Approximate number of affected users in the given group that - match the filter criteria. Users are distinguished by data in - the ``ErrorContext`` of the individual error events, such as - their login name or their remote IP address in case of HTTP - requests. The number of affected users can be zero even if the - number of errors is non-zero if no data was provided from - which the affected user could be deduced. Users are counted - based on data in the request context that was provided in the - error report. If more users are implicitly affected, such as - due to a crash of the whole service, this is not reflected - here. - timed_counts: - Approximate number of occurrences over time. Timed counts - returned by ListGroups are guaranteed to be: - Inside the - requested time interval - Non-overlapping, and - Ordered by - ascending time. 
- first_seen_time: - Approximate first occurrence that was ever seen for this group - and which matches the given filter criteria, ignoring the - time\_range that was specified in the request. - last_seen_time: - Approximate last occurrence that was ever seen for this group - and which matches the given filter criteria, ignoring the - time\_range that was specified in the request. - affected_services: - Service contexts with a non-zero error count for the given - filter criteria. This list can be truncated if multiple - services are affected. Refer to ``num_affected_services`` for - the total count. - num_affected_services: - The total number of services with a non-zero error count for - the given filter criteria. - representative: - An arbitrary event that is chosen as representative for the - whole group. The representative event is intended to be used - as a quick preview for the whole group. Events in the group - are usually sufficiently similar to each other such that - showing an arbitrary representative provides insight into the - characteristics of the group as a whole. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats) - ), -) -_sym_db.RegisterMessage(ErrorGroupStats) - -TimedCount = _reflection.GeneratedProtocolMessageType( - "TimedCount", - (_message.Message,), - dict( - DESCRIPTOR=_TIMEDCOUNT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""The number of errors in a given time period. All numbers are approximate - since the error events are sampled before counting them. - - - Attributes: - count: - Approximate number of occurrences in the given time period. - start_time: - Start of the time period to which ``count`` refers (included). - end_time: - End of the time period to which ``count`` refers (excluded). - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.TimedCount) - ), -) -_sym_db.RegisterMessage(TimedCount) - -ListEventsRequest = _reflection.GeneratedProtocolMessageType( - "ListEventsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTEVENTSREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Specifies a set of error events to return. - - - Attributes: - project_name: - [Required] The resource name of the Google Cloud Platform - project. Written as ``projects/`` plus the `Google Cloud - Platform project ID - `__. Example: - ``projects/my-project-123``. - group_id: - [Required] The group for which events shall be returned. - service_filter: - [Optional] List only ErrorGroups which belong to a service - context that matches the filter. Data for all service contexts - is returned if this field is not specified. - time_range: - [Optional] List only data for the given time range. If not set - a default time range is used. The field time\_range\_begin in - the response will specify the beginning of this time range. - page_size: - [Optional] The maximum number of results to return per - response. - page_token: - [Optional] A ``next_page_token`` provided by a previous - response. 
- """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ListEventsRequest) - ), -) -_sym_db.RegisterMessage(ListEventsRequest) - -ListEventsResponse = _reflection.GeneratedProtocolMessageType( - "ListEventsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTEVENTSRESPONSE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Contains a set of requested error events. - - - Attributes: - error_events: - The error events which match the given request. - next_page_token: - If non-empty, more results are available. Pass this token, - along with the same query parameters as the first request, to - view the next page of results. - time_range_begin: - The timestamp specifies the start time to which the request - was restricted. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ListEventsResponse) - ), -) -_sym_db.RegisterMessage(ListEventsResponse) - -QueryTimeRange = _reflection.GeneratedProtocolMessageType( - "QueryTimeRange", - (_message.Message,), - dict( - DESCRIPTOR=_QUERYTIMERANGE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Requests might be rejected or the resulting timed count durations might - be adjusted for lower durations. - - - Attributes: - period: - Restricts the query to the specified time range. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.QueryTimeRange) - ), -) -_sym_db.RegisterMessage(QueryTimeRange) - -ServiceContextFilter = _reflection.GeneratedProtocolMessageType( - "ServiceContextFilter", - (_message.Message,), - dict( - DESCRIPTOR=_SERVICECONTEXTFILTER, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Specifies criteria for filtering a subset of service contexts. The - fields in the filter correspond to the fields in ``ServiceContext``. - Only exact, case-sensitive matches are supported. If a field is unset or - empty, it matches arbitrary values. - - - Attributes: - service: - [Optional] The exact value to match against - ```ServiceContext.service`` `__. - version: - [Optional] The exact value to match against - ```ServiceContext.version`` `__. - resource_type: - [Optional] The exact value to match against - ```ServiceContext.resource_type`` `__. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) - ), -) -_sym_db.RegisterMessage(ServiceContextFilter) - -DeleteEventsRequest = _reflection.GeneratedProtocolMessageType( - "DeleteEventsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEEVENTSREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Deletes all events in the project. - - - Attributes: - project_name: - [Required] The resource name of the Google Cloud Platform - project. Written as ``projects/`` plus the `Google Cloud - Platform project ID - `__. Example: - ``projects/my-project-123``. 
- """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.DeleteEventsRequest) - ), -) -_sym_db.RegisterMessage(DeleteEventsRequest) - -DeleteEventsResponse = _reflection.GeneratedProtocolMessageType( - "DeleteEventsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEEVENTSRESPONSE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.error_stats_service_pb2", - __doc__="""Response message for deleting error events. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.DeleteEventsResponse) - ), -) -_sym_db.RegisterMessage(DeleteEventsResponse) - - -DESCRIPTOR._options = None - -_ERRORSTATSSERVICE = _descriptor.ServiceDescriptor( - name="ErrorStatsService", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=2755, - serialized_end=3381, - methods=[ - _descriptor.MethodDescriptor( - name="ListGroupStats", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.ListGroupStats", - index=0, - containing_service=None, - input_type=_LISTGROUPSTATSREQUEST, - output_type=_LISTGROUPSTATSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002/\022-/v1beta1/{project_name=projects/*}/groupStats" - ), - ), - _descriptor.MethodDescriptor( - name="ListEvents", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.ListEvents", - index=1, - containing_service=None, - input_type=_LISTEVENTSREQUEST, - output_type=_LISTEVENTSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002+\022)/v1beta1/{project_name=projects/*}/events" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteEvents", - full_name="google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.DeleteEvents", - index=2, - containing_service=None, - input_type=_DELETEEVENTSREQUEST, - output_type=_DELETEEVENTSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002+*)/v1beta1/{project_name=projects/*}/events" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_ERRORSTATSSERVICE) - -DESCRIPTOR.services_by_name["ErrorStatsService"] = _ERRORSTATSSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2_grpc.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2_grpc.py deleted file mode 100644 index 06944055deb1..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/error_stats_service_pb2_grpc.py +++ /dev/null @@ -1,86 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.errorreporting_v1beta1.proto import ( - error_stats_service_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2, -) - - -class ErrorStatsServiceStub(object): - """An API for retrieving and managing error statistics as well as data for - individual events. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListGroupStats = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListGroupStatsRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListGroupStatsResponse.FromString, - ) - self.ListEvents = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListEventsRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListEventsResponse.FromString, - ) - self.DeleteEvents = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.DeleteEventsRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.DeleteEventsResponse.FromString, - ) - - -class ErrorStatsServiceServicer(object): - """An API for retrieving and managing error statistics as well as data for - individual events. - """ - - def ListGroupStats(self, request, context): - """Lists the specified groups. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListEvents(self, request, context): - """Lists the specified events. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteEvents(self, request, context): - """Deletes all error events of a given project. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ErrorStatsServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListGroupStats": grpc.unary_unary_rpc_method_handler( - servicer.ListGroupStats, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListGroupStatsRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListGroupStatsResponse.SerializeToString, - ), - "ListEvents": grpc.unary_unary_rpc_method_handler( - servicer.ListEvents, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListEventsRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.ListEventsResponse.SerializeToString, - ), - "DeleteEvents": grpc.unary_unary_rpc_method_handler( - servicer.DeleteEvents, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.DeleteEventsRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_error__stats__service__pb2.DeleteEventsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", - rpc_method_handlers, - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service.proto b/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service.proto deleted file mode 100644 index d77f646ce464..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service.proto +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2016 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.devtools.clouderrorreporting.v1beta1; - -import "google/api/annotations.proto"; -import "google/devtools/clouderrorreporting/v1beta1/common.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.ErrorReporting.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting"; -option java_multiple_files = true; -option java_outer_classname = "ReportErrorsServiceProto"; -option java_package = "com.google.devtools.clouderrorreporting.v1beta1"; -option php_namespace = "Google\\Cloud\\ErrorReporting\\V1beta1"; - -// An API for reporting error events. -service ReportErrorsService { - // Report an individual error event. - // - // This endpoint accepts either an OAuth token, - // or an - // API key - // for authentication. To use an API key, append it to the URL as the value of - // a `key` parameter. 
For example: - // <pre>POST
-  // https://clouderrorreporting.googleapis.com/v1beta1/projects/example-project/events:report?key=123ABC456</pre>
- rpc ReportErrorEvent(ReportErrorEventRequest) - returns (ReportErrorEventResponse) { - option (google.api.http) = { - post: "/v1beta1/{project_name=projects/*}/events:report" - body: "event" - }; - } -} - -// A request for reporting an individual error event. -message ReportErrorEventRequest { - // [Required] The resource name of the Google Cloud Platform project. Written - // as `projects/` plus the - // [Google Cloud Platform project - // ID](https://support.google.com/cloud/answer/6158840). Example: - // `projects/my-project-123`. - string project_name = 1; - - // [Required] The error event to be reported. - ReportedErrorEvent event = 2; -} - -// Response for reporting an individual error event. -// Data may be added to this message in the future. -message ReportErrorEventResponse {} - -// An error event which is reported to the Error Reporting system. -message ReportedErrorEvent { - // [Optional] Time when the event occurred. - // If not provided, the time when the event was received by the - // Error Reporting system will be used. - google.protobuf.Timestamp event_time = 1; - - // [Required] The service context in which this error has occurred. - ServiceContext service_context = 2; - - // [Required] A message describing the error. The message can contain an - // exception stack in one of the supported programming languages and formats. - // In that case, the message is parsed and detailed exception information - // is returned when retrieving the error event again. - string message = 3; - - // [Optional] A description of the context in which the error occurred. - ErrorContext context = 4; -} diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2.py deleted file mode 100644 index 9562dd34b90b..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2.py +++ /dev/null @@ -1,330 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/devtools/clouderrorreporting_v1beta1/proto/report_errors_service.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.errorreporting_v1beta1.proto import ( - common_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/devtools/clouderrorreporting_v1beta1/proto/report_errors_service.proto", - package="google.devtools.clouderrorreporting.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n/com.google.devtools.clouderrorreporting.v1beta1B\030ReportErrorsServiceProtoP\001Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\252\002#Google.Cloud.ErrorReporting.V1Beta1\312\002#Google\\Cloud\\ErrorReporting\\V1beta1" - ), - serialized_pb=_b( - '\nMgoogle/devtools/clouderrorreporting_v1beta1/proto/report_errors_service.proto\x12+google.devtools.clouderrorreporting.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a>google/devtools/clouderrorreporting_v1beta1/proto/common.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x7f\n\x17ReportErrorEventRequest\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12N\n\x05\x65vent\x18\x02 \x01(\x0b\x32?.google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent"\x1a\n\x18ReportErrorEventResponse"\xf7\x01\n\x12ReportedErrorEvent\x12.\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12T\n\x0fservice_context\x18\x02 \x01(\x0b\x32;.google.devtools.clouderrorreporting.v1beta1.ServiceContext\x12\x0f\n\x07message\x18\x03 \x01(\t\x12J\n\x07\x63ontext\x18\x04 \x01(\x0b\x32\x39.google.devtools.clouderrorreporting.v1beta1.ErrorContext2\xf8\x01\n\x13ReportErrorsService\x12\xe0\x01\n\x10ReportErrorEvent\x12\x44.google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest\x1a\x45.google.devtools.clouderrorreporting.v1beta1.ReportErrorEventResponse"?\x82\xd3\xe4\x93\x02\x39"0/v1beta1/{project_name=projects/*}/events:report:\x05\x65ventB\xf9\x01\n/com.google.devtools.clouderrorreporting.v1beta1B\x18ReportErrorsServiceProtoP\x01Z^google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1;clouderrorreporting\xaa\x02#Google.Cloud.ErrorReporting.V1Beta1\xca\x02#Google\\Cloud\\ErrorReporting\\V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_REPORTERROREVENTREQUEST = _descriptor.Descriptor( - name="ReportErrorEventRequest", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_name", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest.project_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="event", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest.event", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=253, - serialized_end=380, -) - - -_REPORTERROREVENTRESPONSE = _descriptor.Descriptor( - name="ReportErrorEventResponse", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorEventResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=382, - serialized_end=408, -) - - -_REPORTEDERROREVENT = _descriptor.Descriptor( - name="ReportedErrorEvent", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="event_time", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent.event_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_context", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent.service_context", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent.message", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent.context", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=411, - serialized_end=658, -) - -_REPORTERROREVENTREQUEST.fields_by_name["event"].message_type = _REPORTEDERROREVENT -_REPORTEDERROREVENT.fields_by_name[ - "event_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_REPORTEDERROREVENT.fields_by_name[ - "service_context" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._SERVICECONTEXT -) -_REPORTEDERROREVENT.fields_by_name[ - "context" -].message_type = ( - google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_common__pb2._ERRORCONTEXT -) -DESCRIPTOR.message_types_by_name["ReportErrorEventRequest"] = _REPORTERROREVENTREQUEST -DESCRIPTOR.message_types_by_name["ReportErrorEventResponse"] = _REPORTERROREVENTRESPONSE -DESCRIPTOR.message_types_by_name["ReportedErrorEvent"] = _REPORTEDERROREVENT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ReportErrorEventRequest = _reflection.GeneratedProtocolMessageType( - "ReportErrorEventRequest", - (_message.Message,), - dict( - DESCRIPTOR=_REPORTERROREVENTREQUEST, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.report_errors_service_pb2", - __doc__="""A request for reporting an individual error event. - - - Attributes: - project_name: - [Required] The resource name of the Google Cloud Platform - project. Written as ``projects/`` plus the `Google Cloud - Platform project ID - `__. Example: - ``projects/my-project-123``. - event: - [Required] The error event to be reported. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest) - ), -) -_sym_db.RegisterMessage(ReportErrorEventRequest) - -ReportErrorEventResponse = _reflection.GeneratedProtocolMessageType( - "ReportErrorEventResponse", - (_message.Message,), - dict( - DESCRIPTOR=_REPORTERROREVENTRESPONSE, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.report_errors_service_pb2", - __doc__="""Response for reporting an individual error event. Data may be added to - this message in the future. - """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ReportErrorEventResponse) - ), -) -_sym_db.RegisterMessage(ReportErrorEventResponse) - -ReportedErrorEvent = _reflection.GeneratedProtocolMessageType( - "ReportedErrorEvent", - (_message.Message,), - dict( - DESCRIPTOR=_REPORTEDERROREVENT, - __module__="google.devtools.clouderrorreporting_v1beta1.proto.report_errors_service_pb2", - __doc__="""An error event which is reported to the Error Reporting system. - - - Attributes: - event_time: - [Optional] Time when the event occurred. If not provided, the - time when the event was received by the Error Reporting system - will be used. - service_context: - [Required] The service context in which this error has - occurred. - message: - [Required] A message describing the error. The message can - contain an exception stack in one of the supported programming - languages and formats. In that case, the message is parsed and - detailed exception information is returned when retrieving the - error event again. - context: - [Optional] A description of the context in which the error - occurred. 
- """, - # @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent) - ), -) -_sym_db.RegisterMessage(ReportedErrorEvent) - - -DESCRIPTOR._options = None - -_REPORTERRORSSERVICE = _descriptor.ServiceDescriptor( - name="ReportErrorsService", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=661, - serialized_end=909, - methods=[ - _descriptor.MethodDescriptor( - name="ReportErrorEvent", - full_name="google.devtools.clouderrorreporting.v1beta1.ReportErrorsService.ReportErrorEvent", - index=0, - containing_service=None, - input_type=_REPORTERROREVENTREQUEST, - output_type=_REPORTERROREVENTRESPONSE, - serialized_options=_b( - '\202\323\344\223\0029"0/v1beta1/{project_name=projects/*}/events:report:\005event' - ), - ) - ], -) -_sym_db.RegisterServiceDescriptor(_REPORTERRORSSERVICE) - -DESCRIPTOR.services_by_name["ReportErrorsService"] = _REPORTERRORSSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2_grpc.py b/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2_grpc.py deleted file mode 100644 index 12f6e08e7b57..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/report_errors_service_pb2_grpc.py +++ /dev/null @@ -1,58 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.errorreporting_v1beta1.proto import ( - report_errors_service_pb2 as google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_report__errors__service__pb2, -) - - -class ReportErrorsServiceStub(object): - """An API for reporting error events. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ReportErrorEvent = channel.unary_unary( - "/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent", - request_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_report__errors__service__pb2.ReportErrorEventRequest.SerializeToString, - response_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_report__errors__service__pb2.ReportErrorEventResponse.FromString, - ) - - -class ReportErrorsServiceServicer(object): - """An API for reporting error events. - """ - - def ReportErrorEvent(self, request, context): - """Report an individual error event. - - This endpoint accepts either an OAuth token, - or an - API key - for authentication. To use an API key, append it to the URL as the value of - a `key` parameter. For example: -
<pre>POST
-    https://clouderrorreporting.googleapis.com/v1beta1/projects/example-project/events:report?key=123ABC456</pre>
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ReportErrorsServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ReportErrorEvent": grpc.unary_unary_rpc_method_handler( - servicer.ReportErrorEvent, - request_deserializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_report__errors__service__pb2.ReportErrorEventRequest.FromString, - response_serializer=google_dot_devtools_dot_clouderrorreporting__v1beta1_dot_proto_dot_report__errors__service__pb2.ReportErrorEventResponse.SerializeToString, - ) - } - generic_handler = grpc.method_handlers_generic_handler( - "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", - rpc_method_handlers, - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/proto/synth.metadata b/error_reporting/google/cloud/errorreporting_v1beta1/proto/synth.metadata deleted file mode 100644 index aa85442f198e..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/proto/synth.metadata +++ /dev/null @@ -1,3 +0,0 @@ -{ - "updateTime": "2019-01-23T23:03:31.526988Z" -} \ No newline at end of file diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/types.py b/error_reporting/google/cloud/errorreporting_v1beta1/types.py deleted file mode 100644 index 388e69bbad78..000000000000 --- a/error_reporting/google/cloud/errorreporting_v1beta1/types.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -import sys - -from google.api import http_pb2 -from google.api import label_pb2 -from google.api import monitored_resource_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import timestamp_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 - - -_shared_modules = [ - http_pb2, - label_pb2, - monitored_resource_pb2, - report_errors_service_pb2, - descriptor_pb2, - duration_pb2, - timestamp_pb2, -] - -_local_modules = [common_pb2, error_group_service_pb2, error_stats_service_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.errorreporting_v1beta1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/error_reporting/noxfile.py b/error_reporting/noxfile.py deleted file mode 100644 index a2eefbb6765f..000000000000 --- a/error_reporting/noxfile.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! - -from __future__ import absolute_import -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) -BLACK_VERSION = "black==19.3b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -if os.path.exists("samples"): - BLACK_PATHS.append("samples") - - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) - session.run("black", "--check", *BLACK_PATHS) - session.run("flake8", "google", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
- """ - session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session): - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - "--cov=google.cloud", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") - session.install("-e", ".") - - # Run py.test against the system tests. - if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/error_reporting/pylint.config.py b/error_reporting/pylint.config.py deleted file mode 100644 index 5d64b9d2f256..000000000000 --- a/error_reporting/pylint.config.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This module is used to configure gcp-devrel-py-tools run-pylint.""" - -# Library configuration - -# library_additions = {} -# library_replacements = {} - -# Test configuration - -# test_additions = copy.deepcopy(library_additions) -# test_replacements = copy.deepcopy(library_replacements) diff --git a/error_reporting/setup.cfg b/error_reporting/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/error_reporting/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/error_reporting/setup.py b/error_reporting/setup.py deleted file mode 100644 index 80a0350ad79a..000000000000 --- a/error_reporting/setup.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - - -# Package metadata. - -name = "google-cloud-error-reporting" -description = "Stackdriver Error Reporting API client library" -version = "0.33.0" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 3 - Alpha" -dependencies = ["google-cloud-logging>=1.14.0, <2.0dev"] -extras = {} - - -# Setup boilerplate below this line. 
- -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/error_reporting/synth.metadata b/error_reporting/synth.metadata deleted file mode 100644 index f02edcf46dae..000000000000 --- a/error_reporting/synth.metadata +++ /dev/null @@ -1,39 +0,0 @@ -{ - "updateTime": "2019-11-12T13:27:15.140489Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.41.1", - "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", - "internalRef": "279774957" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "errorreporting", - "apiVersion": "v1beta1", - "language": "python", - "generator": "gapic", - "config": "google/devtools/clouderrorreporting/artman_errorreporting.yaml" - } - } - ] -} \ No newline at end of file diff --git a/error_reporting/synth.py b/error_reporting/synth.py deleted file mode 100644 index 6c34630d506e..000000000000 --- a/error_reporting/synth.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
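Aside: for orientation, the package being deleted here has a small runtime surface. Judging by the unit and system tests further down in this patch, typical usage looks roughly like this (project and credentials resolved from the environment):

    from google.cloud import error_reporting

    client = error_reporting.Client()

    # Report a plain message; the client records the call site as the
    # reportLocation automatically.
    client.report("Something went wrong")

    # Or report the exception currently being handled, with its traceback.
    try:
        raise NameError("boom")
    except NameError:
        client.report_exception()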
- -"""This script is used to synthesize generated parts of this library.""" -import synthtool as s -from synthtool import gcp - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Generate error_reporting GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - "errorreporting", - "v1beta1", - config_path="/google/devtools/clouderrorreporting" "/artman_errorreporting.yaml", - artman_output_name="error-reporting-v1beta1", - include_protos=True, -) - -s.move(library / "google/cloud/errorreporting_v1beta1/proto") -s.move(library / "google/cloud/errorreporting_v1beta1/gapic") -s.move(library / "tests/unit/gapic/v1beta1") -s.move(library / "tests/system/gapic/v1beta1") - -# Fix up imports -s.replace( - "google/**/*.py", - r"from google.devtools.clouderrorreporting_v1beta1.proto import ", - r"from google.cloud.errorreporting_v1beta1.proto import ", -) - -# Fix up docstrings in GAPIC clients -DISCARD_AUTH_BOILERPLATE = r""" - This endpoint accepts either an OAuth token, or an API key for - authentication. To use an API key, append it to the URL as the value of - a ``key`` parameter. For example: - - \.\. raw:: html -
-        <pre>POST
-            .*
-""" - -targets = [ - "google/cloud/errorreporting_v1beta1/gapic/*_client.py", - "google/cloud/errorreporting_v1beta1/gapic/transports/*_transport.py", -] - -s.replace(targets, DISCARD_AUTH_BOILERPLATE, r"") - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=100) -s.move(templated_files) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/error_reporting/tests/__init__.py b/error_reporting/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/error_reporting/tests/system/gapic/v1beta1/test_system_report_errors_service_v1beta1.py b/error_reporting/tests/system/gapic/v1beta1/test_system_report_errors_service_v1beta1.py deleted file mode 100644 index bb4239282d68..000000000000 --- a/error_reporting/tests/system/gapic/v1beta1/test_system_report_errors_service_v1beta1.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time - -from google.cloud import errorreporting_v1beta1 -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 - - -class TestSystemReportErrorsService(object): - def test_report_error_event(self): - project_id = os.environ["PROJECT_ID"] - - client = errorreporting_v1beta1.ReportErrorsServiceClient() - project_name = client.project_path(project_id) - message = "[MESSAGE]" - service = "[SERVICE]" - service_context = {"service": service} - file_path = "path/to/file.lang" - line_number = 42 - function_name = "meaningOfLife" - report_location = { - "file_path": file_path, - "line_number": line_number, - "function_name": function_name, - } - context = {"report_location": report_location} - event = { - "message": message, - "service_context": service_context, - "context": context, - } - response = client.report_error_event(project_name, event) diff --git a/error_reporting/tests/system/test_system.py b/error_reporting/tests/system/test_system.py deleted file mode 100644 index cf454aecdbd1..000000000000 --- a/error_reporting/tests/system/test_system.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import functools -import operator -import unittest - -from google.cloud import error_reporting -from google.cloud.errorreporting_v1beta1.gapic import error_stats_service_client -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 -from google.protobuf.duration_pb2 import Duration - -from test_utils.retry import RetryResult -from test_utils.system import unique_resource_id - - -ERROR_MSG = "Stackdriver Error Reporting System Test" - - -def setUpModule(): - Config.CLIENT = error_reporting.Client() - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - - CLIENT = None - - -def _list_groups(client): - """List Error Groups from the last 60 seconds. - - This class provides a wrapper around making calls to the GAX - API. It's used by the system tests to find the appropriate error group - to verify the error was successfully reported. - - :type client: :class:`~google.cloud.error_reporting.client.Client` - :param client: The client containing a project and credentials. - - :rtype: :class:`~google.gax.ResourceIterator` - :returns: Iterable of :class:`~.error_stats_service_pb2.ErrorGroupStats`. - """ - gax_api = error_stats_service_client.ErrorStatsServiceClient( - credentials=client._credentials - ) - project_name = gax_api.project_path(client.project) - - time_range = error_stats_service_pb2.QueryTimeRange() - time_range.period = error_stats_service_pb2.QueryTimeRange.PERIOD_1_HOUR - - duration = Duration(seconds=60 * 60) - - return gax_api.list_group_stats( - project_name, time_range, timed_count_duration=duration - ) - - -def _simulate_exception(class_name, client): - """Simulates an exception to verify it was reported. - - :type class_name: str - :param class_name: The name of a custom error class to - create (and raise). - - :type client: :class:`~google.cloud.error_reporting.client.Client` - :param client: The client that will report the exception. - """ - custom_exc = type(class_name, (RuntimeError,), {}) - try: - raise custom_exc(ERROR_MSG) - except RuntimeError: - client.report_exception() - - -def _get_error_count(class_name, client): - """Counts the number of errors in the group of the test exception. - - :type class_name: str - :param class_name: The name of a custom error class used. - - :type client: :class:`~google.cloud.error_reporting.client.Client` - :param client: The client containing a project and credentials. - - :rtype: int - :returns: Group count for errors that match ``class_name``. If no - match is found, returns :data:`None`. - """ - groups = _list_groups(client) - for group in groups: - if class_name in group.representative.message: - return group.count - - -class TestErrorReporting(unittest.TestCase): - def test_report_exception(self): - # Get a class name unique to this test case. - class_name = "RuntimeError" + unique_resource_id("_") - - # Simulate an error: group won't exist until we report - # first exception. - _simulate_exception(class_name, Config.CLIENT) - - is_one = functools.partial(operator.eq, 1) - is_one.__name__ = "is_one" # partial() has no name. 
- retry = RetryResult(is_one, max_tries=8) - wrapped_get_count = retry(_get_error_count) - - error_count = wrapped_get_count(class_name, Config.CLIENT) - self.assertEqual(error_count, 1) diff --git a/error_reporting/tests/unit/__init__.py b/error_reporting/tests/unit/__init__.py deleted file mode 100644 index df379f1e9d88..000000000000 --- a/error_reporting/tests/unit/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/error_reporting/tests/unit/gapic/v1beta1/test_error_group_service_client_v1beta1.py b/error_reporting/tests/unit/gapic/v1beta1/test_error_group_service_client_v1beta1.py deleted file mode 100644 index d65f3f1f7527..000000000000 --- a/error_reporting/tests/unit/gapic/v1beta1/test_error_group_service_client_v1beta1.py +++ /dev/null @@ -1,142 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
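Aside: `RetryResult` comes from the monorepo's private `test_utils` package, so it disappears with this layout change. A minimal stand-in that captures what the system test above relies on, namely polling a callable until a predicate accepts its result, might look like:

    import functools
    import time

    def retry_result(predicate, max_tries=8, delay=1.0, backoff=2.0):
        """Wrap a function so it is re-called until ``predicate(result)`` passes."""
        def decorate(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                wait = delay
                for attempt in range(max_tries):
                    result = func(*args, **kwargs)
                    if predicate(result):
                        return result
                    if attempt < max_tries - 1:
                        time.sleep(wait)
                        wait *= backoff
                raise AssertionError("predicate never satisfied")
            return wrapper
        return decorate

    # Mirrors the test: poll until the error group's count reaches one.
    # wrapped = retry_result(lambda count: count == 1)(_get_error_count)
    # assert wrapped(class_name, client) == 1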
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import errorreporting_v1beta1 -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestErrorGroupServiceClient(object): - def test_get_group(self): - # Setup Expected Response - name = "name3373707" - group_id = "groupId506361563" - expected_response = {"name": name, "group_id": group_id} - expected_response = common_pb2.ErrorGroup(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorGroupServiceClient() - - # Setup Request - group_name = client.group_path("[PROJECT]", "[GROUP]") - - response = client.get_group(group_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = error_group_service_pb2.GetGroupRequest( - group_name=group_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_group_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorGroupServiceClient() - - # Setup request - group_name = client.group_path("[PROJECT]", "[GROUP]") - - with pytest.raises(CustomException): - client.get_group(group_name) - - def test_update_group(self): - # Setup Expected Response - name = "name3373707" - group_id = "groupId506361563" - expected_response = {"name": name, "group_id": group_id} - expected_response = common_pb2.ErrorGroup(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorGroupServiceClient() - - # Setup Request - group = {} - - response = client.update_group(group) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = error_group_service_pb2.UpdateGroupRequest(group=group) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_group_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorGroupServiceClient() - - # Setup request - group = {} - - with pytest.raises(CustomException): - client.update_group(group) diff --git a/error_reporting/tests/unit/gapic/v1beta1/test_error_stats_service_client_v1beta1.py b/error_reporting/tests/unit/gapic/v1beta1/test_error_stats_service_client_v1beta1.py deleted file mode 100644 index dc09b13fc1b1..000000000000 --- a/error_reporting/tests/unit/gapic/v1beta1/test_error_stats_service_client_v1beta1.py +++ /dev/null @@ -1,207 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import errorreporting_v1beta1 -from google.cloud.errorreporting_v1beta1.proto import common_pb2 -from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestErrorStatsServiceClient(object): - def test_list_group_stats(self): - # Setup Expected Response - next_page_token = "" - error_group_stats_element = {} - error_group_stats = [error_group_stats_element] - expected_response = { - "next_page_token": next_page_token, - "error_group_stats": error_group_stats, - } - expected_response = error_stats_service_pb2.ListGroupStatsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup Request - project_name = client.project_path("[PROJECT]") - time_range = {} - - paged_list_response = client.list_group_stats(project_name, time_range) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.error_group_stats[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = error_stats_service_pb2.ListGroupStatsRequest( - project_name=project_name, time_range=time_range - ) 
- actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_group_stats_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup request - project_name = client.project_path("[PROJECT]") - time_range = {} - - paged_list_response = client.list_group_stats(project_name, time_range) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_events(self): - # Setup Expected Response - next_page_token = "" - error_events_element = {} - error_events = [error_events_element] - expected_response = { - "next_page_token": next_page_token, - "error_events": error_events, - } - expected_response = error_stats_service_pb2.ListEventsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup Request - project_name = client.project_path("[PROJECT]") - group_id = "groupId506361563" - - paged_list_response = client.list_events(project_name, group_id) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.error_events[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = error_stats_service_pb2.ListEventsRequest( - project_name=project_name, group_id=group_id - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_events_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup request - project_name = client.project_path("[PROJECT]") - group_id = "groupId506361563" - - paged_list_response = client.list_events(project_name, group_id) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_events(self): - # Setup Expected Response - expected_response = {} - expected_response = error_stats_service_pb2.DeleteEventsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup Request - project_name = client.project_path("[PROJECT]") - - response = client.delete_events(project_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = error_stats_service_pb2.DeleteEventsRequest( - project_name=project_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_events_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ErrorStatsServiceClient() - - # Setup request - project_name = 
client.project_path("[PROJECT]") - - with pytest.raises(CustomException): - client.delete_events(project_name) diff --git a/error_reporting/tests/unit/gapic/v1beta1/test_report_errors_service_client_v1beta1.py b/error_reporting/tests/unit/gapic/v1beta1/test_report_errors_service_client_v1beta1.py deleted file mode 100644 index ef85fa15051a..000000000000 --- a/error_reporting/tests/unit/gapic/v1beta1/test_report_errors_service_client_v1beta1.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import errorreporting_v1beta1 -from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestReportErrorsServiceClient(object): - def test_report_error_event(self): - # Setup Expected Response - expected_response = {} - expected_response = report_errors_service_pb2.ReportErrorEventResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ReportErrorsServiceClient() - - # Setup Request - project_name = client.project_path("[PROJECT]") - event = {} - - response = client.report_error_event(project_name, event) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = report_errors_service_pb2.ReportErrorEventRequest( - project_name=project_name, event=event - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_report_error_event_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = errorreporting_v1beta1.ReportErrorsServiceClient() - - # Setup request - project_name = client.project_path("[PROJECT]") - event = {} - - with 
pytest.raises(CustomException): - client.report_error_event(project_name, event) diff --git a/error_reporting/tests/unit/test__gapic.py b/error_reporting/tests/unit/test__gapic.py deleted file mode 100644 index 00940f466df0..000000000000 --- a/error_reporting/tests/unit/test__gapic.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class Test_make_report_error_api(unittest.TestCase): - @staticmethod - def _call_fut(client): - from google.cloud.error_reporting._gapic import make_report_error_api - - return make_report_error_api(client) - - def test_make_report_error_api(self): - client = mock.Mock( - spec=["project", "_credentials", "_client_info", "_client_options"] - ) - - # Call the function being tested. - patch = mock.patch( - "google.cloud.errorreporting_v1beta1." - "gapic.report_errors_service_client.ReportErrorsServiceClient" - ) - - with patch as patched: - report_error_client = self._call_fut(client) - - # Assert that the final error client has the project in - # the expected location. - self.assertIs(report_error_client._project, client.project) - self.assertIs(report_error_client._gapic_api, patched.return_value) - patched.assert_called_once_with( - credentials=client._credentials, - client_info=client._client_info, - client_options=client._client_options, - ) - - -class Test_ErrorReportingGapicApi(unittest.TestCase): - - PROJECT = "PROJECT" - - def _make_one(self, gapic_api, project): - from google.cloud.error_reporting._gapic import _ErrorReportingGapicApi - - return _ErrorReportingGapicApi(gapic_api, project) - - def test_constructor(self): - gapic_api = mock.Mock(spec=[]) - gapic_client_wrapper = self._make_one(gapic_api, self.PROJECT) - - self.assertEqual(gapic_client_wrapper._project, self.PROJECT) - self.assertEqual(gapic_client_wrapper._gapic_api, gapic_api) - - def test_report_error_event(self): - from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 - - gapic_api = mock.Mock(spec=["project_path", "report_error_event"]) - gapic_client_wrapper = self._make_one(gapic_api, self.PROJECT) - - error_report = {"message": "The cabs are here."} - gapic_client_wrapper.report_error_event(error_report) - - gapic_api.project_path.assert_called_once_with(self.PROJECT) - project_name = gapic_api.project_path.return_value - error_pb = report_errors_service_pb2.ReportedErrorEvent( - message=error_report["message"] - ) - gapic_api.report_error_event.assert_called_once_with(project_name, error_pb) diff --git a/error_reporting/tests/unit/test__logging.py b/error_reporting/tests/unit/test__logging.py deleted file mode 100644 index 726eaabfec75..000000000000 --- a/error_reporting/tests/unit/test__logging.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class Test_ErrorReportingLoggingAPI(unittest.TestCase): - - PROJECT = "PROJECT" - - def _make_one(self, project, credentials, **kw): - from google.cloud.error_reporting._logging import _ErrorReportingLoggingAPI - - return _ErrorReportingLoggingAPI(project, credentials, **kw) - - @mock.patch("google.cloud.logging.client.Client") - def test_ctor_defaults(self, mocked_cls): - credentials = _make_credentials() - - logging_api = self._make_one(self.PROJECT, credentials) - - self.assertIs(logging_api.logging_client, mocked_cls.return_value) - mocked_cls.assert_called_once_with( - self.PROJECT, credentials, _http=None, client_info=None, client_options=None - ) - - @mock.patch("google.cloud.logging.client.Client") - def test_ctor_explicit(self, mocked_cls): - credentials = _make_credentials() - http = mock.Mock() - client_info = mock.Mock() - client_options = mock.Mock() - - logging_api = self._make_one( - self.PROJECT, - credentials, - _http=http, - client_info=client_info, - client_options=client_options, - ) - - self.assertIs(logging_api.logging_client, mocked_cls.return_value) - mocked_cls.assert_called_once_with( - self.PROJECT, - credentials, - _http=http, - client_info=client_info, - client_options=client_options, - ) - - @mock.patch("google.cloud.logging.client.Client") - def test_report_error_event(self, mocked_cls): - credentials = _make_credentials() - logging_api = self._make_one(self.PROJECT, credentials) - - logger = mock.Mock(spec=["log_struct"]) - logging_api.logging_client.logger.return_value = logger - - # Actually make the API call. - error_report = {"message": "The cabs are here."} - logging_api.report_error_event(error_report) - - # Check the mocks. - logger.log_struct.assert_called_once_with(error_report) diff --git a/error_reporting/tests/unit/test_client.py b/error_reporting/tests/unit/test_client.py deleted file mode 100644 index 3a7290e8aa96..000000000000 --- a/error_reporting/tests/unit/test_client.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
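Aside: the `_ErrorReportingLoggingAPI` tests above document the non-gRPC fallback transport: an error event is simply a structured entry written through google-cloud-logging. In terms of the v1-era logging API these tests mock, the happy path reduces to the following sketch (the logger name here is illustrative):

    from google.cloud import logging

    logging_client = logging.Client()
    logger = logging_client.logger("errors")  # illustrative logger name

    # Entries with this JSON shape are picked up by Error Reporting.
    logger.log_struct({"message": "The cabs are here."})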
- - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - - PROJECT = "PROJECT" - SERVICE = "SERVICE" - VERSION = "myversion" - - @staticmethod - def _get_target_class(): - from google.cloud.error_reporting.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def _make_http(self, *args, **kw): - from google.cloud.error_reporting.client import HTTPContext - - return HTTPContext(*args, **kw) - - def _get_report_payload(self, error_api): - self.assertEqual(error_api.report_error_event.call_count, 1) - call = error_api.report_error_event.mock_calls[0] - _, positional, kwargs = call - self.assertEqual(kwargs, {}) - self.assertEqual(len(positional), 1) - return positional[0] - - @mock.patch("google.cloud.client._determine_default_project") - def test_ctor_defaults(self, default_mock): - from google.api_core.client_info import ClientInfo - - credentials = _make_credentials() - default_mock.return_value = "foo" - client = self._make_one(credentials=credentials) - self.assertEqual(client.service, client.DEFAULT_SERVICE) - self.assertEqual(client.version, None) - self.assertIsInstance(client._client_info, ClientInfo) - default_mock.assert_called_once_with(None) - - def test_ctor_explicit(self): - credentials = _make_credentials() - client_info = mock.Mock() - client_options = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - service=self.SERVICE, - version=self.VERSION, - client_info=client_info, - client_options=client_options, - ) - self.assertEqual(client.service, self.SERVICE) - self.assertEqual(client.version, self.VERSION) - self.assertIs(client._client_info, client_info) - self.assertIs(client._client_options, client_options) - - def test_report_errors_api_already(self): - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) - client._report_errors_api = already = mock.Mock() - self.assertIs(client.report_errors_api, already) - - def test_report_errors_api_wo_grpc(self): - credentials = _make_credentials() - client_info = mock.Mock() - client_options = mock.Mock() - http = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - client_info=client_info, - client_options=client_options, - _http=http, - _use_grpc=False, - ) - patch = mock.patch( - "google.cloud.error_reporting.client._ErrorReportingLoggingAPI" - ) - - with patch as patched: - api = client.report_errors_api - - self.assertIs(api, patched.return_value) - patched.assert_called_once_with( - self.PROJECT, credentials, http, client_info, client_options - ) - - def test_report_errors_api_w_grpc(self): - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=True - ) - patch = mock.patch("google.cloud.error_reporting.client.make_report_error_api") - - with patch as patched: - api = client.report_errors_api - - self.assertIs(api, patched.return_value) - patched.assert_called_once_with(client) - - def test_report_exception_with_grpc(self): - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) - - patch = mock.patch("google.cloud.error_reporting.client.make_report_error_api") - with patch as make_api: - try: - raise NameError - except NameError: - client.report_exception() 
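Aside: the assertions in the surrounding tests pin down the JSON shape of a reported event. Spelled out with illustrative values, a fully populated payload looks approximately like:

    payload = {
        "serviceContext": {"service": "myservice", "version": "myversion"},
        # A formatted traceback (or a plain message) goes here.
        "message": "Traceback (most recent call last): ... NameError",
        "context": {
            "httpRequest": {"method": "GET", "responseStatusCode": 500},
            "user": "user@gmail.com",
            # Filled in automatically from the call site by client.report().
            "reportLocation": {
                "filePath": "test_client.py",
                "lineNumber": 123,
                "functionName": "test_report",
            },
        },
    }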
- payload = make_api.return_value.report_error_event.call_args[0][0] - make_api.assert_called_once_with(client) - - self.assertEqual(payload["serviceContext"], {"service": client.DEFAULT_SERVICE}) - self.assertIn("test_report", payload["message"]) - self.assertIn("test_client.py", payload["message"]) - - def test_report_exception_wo_grpc(self): - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) - patch = mock.patch( - "google.cloud.error_reporting.client._ErrorReportingLoggingAPI" - ) - with patch as _error_api: - try: - raise NameError - except NameError: - client.report_exception() - mock_report = _error_api.return_value.report_error_event - payload = mock_report.call_args[0][0] - - self.assertEqual(payload["serviceContext"], {"service": client.DEFAULT_SERVICE}) - self.assertIn("test_report", payload["message"]) - self.assertIn("test_client.py", payload["message"]) - self.assertIsNotNone(client.report_errors_api) - - @mock.patch("google.cloud.error_reporting.client.make_report_error_api") - def test_report_exception_with_service_version_in_constructor(self, make_api): - credentials = _make_credentials() - service = "notdefault" - version = "notdefaultversion" - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - service=service, - version=version, - ) - - http_context = self._make_http(method="GET", response_status_code=500) - user = "user@gmail.com" - - error_api = mock.Mock(spec=["report_error_event"]) - make_api.return_value = error_api - - try: - raise NameError - except NameError: - client.report_exception(http_context=http_context, user=user) - - make_api.assert_called_once_with(client) - - payload = self._get_report_payload(error_api) - self.assertEqual( - payload["serviceContext"], {"service": service, "version": version} - ) - self.assertIn( - "test_report_exception_with_service_version_in_constructor", - payload["message"], - ) - self.assertIn("test_client.py", payload["message"]) - self.assertEqual(payload["context"]["httpRequest"]["responseStatusCode"], 500) - self.assertEqual(payload["context"]["httpRequest"]["method"], "GET") - self.assertEqual(payload["context"]["user"], user) - - @mock.patch("google.cloud.error_reporting.client.make_report_error_api") - def test_report(self, make_api): - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) - - error_api = mock.Mock(spec=["report_error_event"]) - make_api.return_value = error_api - - message = "this is an error" - client.report(message) - - payload = self._get_report_payload(error_api) - - self.assertEqual(payload["message"], message) - report_location = payload["context"]["reportLocation"] - self.assertIn("test_client.py", report_location["filePath"]) - self.assertEqual(report_location["functionName"], "test_report") - self.assertGreater(report_location["lineNumber"], 100) - self.assertLess(report_location["lineNumber"], 250) diff --git a/error_reporting/tests/unit/test_util.py b/error_reporting/tests/unit/test_util.py deleted file mode 100644 index cd48f3f8d3ad..000000000000 --- a/error_reporting/tests/unit/test_util.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2016 Google LLC All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test_build_flask_context(unittest.TestCase): - def _call_fut(self, request): - from google.cloud.error_reporting.util import build_flask_context - - return build_flask_context(request) - - def test_flask_helper(self): - import mock - - user_agent = mock.Mock(string="Google Cloud Unit Tests Agent") - request = _Request( - "http://google.com", "GET", user_agent, "http://gmail.com", "127.0.0.1" - ) - - context = self._call_fut(request) - self.assertEqual(request.url, context.url) - self.assertEqual(request.method, context.method) - self.assertEqual(request.user_agent.string, context.userAgent) - self.assertEqual(request.referrer, context.referrer) - self.assertEqual(request.remote_addr, context.remoteIp) - - -class _Request(object): - def __init__(self, url, method, user_agent, referrer, remote_addr): - self.url = url - self.method = method - self.user_agent = user_agent - self.referrer = referrer - self.remote_addr = remote_addr From 67c3114cff21151db75294999301267ccf55599e Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 7 Feb 2020 09:26:20 +0000 Subject: [PATCH 3/3] chore: remove grafeas from the monorepo --- .kokoro/continuous/grafeas.cfg | 7 - .kokoro/docs/grafeas.cfg | 7 - .kokoro/presubmit/grafeas.cfg | 7 - .kokoro/release/grafeas.cfg | 7 - README.rst | 2 +- grafeas/.coveragerc | 19 - grafeas/.flake8 | 14 - grafeas/.repo-metadata.json | 13 - grafeas/CHANGELOG.md | 47 - grafeas/LICENSE | 201 -- grafeas/MANIFEST.in | 5 - grafeas/README.rst | 61 - grafeas/docs/README.rst | 1 - grafeas/docs/_static/custom.css | 4 - grafeas/docs/_templates/layout.html | 50 - grafeas/docs/changelog.md | 1 - grafeas/docs/conf.py | 349 -- grafeas/docs/gapic/v1/api.rst | 6 - grafeas/docs/gapic/v1/types.rst | 5 - grafeas/docs/index.rst | 19 - grafeas/grafeas.py | 29 - grafeas/grafeas/__init__.py | 24 - grafeas/grafeas/grafeas.py | 29 - grafeas/grafeas/grafeas_v1/__init__.py | 45 - grafeas/grafeas/grafeas_v1/gapic/__init__.py | 0 grafeas/grafeas/grafeas_v1/gapic/enums.py | 257 -- .../grafeas_v1/gapic/grafeas_client.py | 1363 -------- .../grafeas_v1/gapic/grafeas_client_config.py | 93 - .../grafeas_v1/gapic/transports/__init__.py | 0 .../transports/grafeas_grpc_transport.py | 291 -- grafeas/grafeas/grafeas_v1/proto/__init__.py | 0 .../grafeas_v1/proto/attestation.proto | 73 - .../grafeas_v1/proto/attestation_pb2.py | 260 -- .../grafeas_v1/proto/attestation_pb2_grpc.py | 2 - grafeas/grafeas/grafeas_v1/proto/build.proto | 50 - grafeas/grafeas/grafeas_v1/proto/build_pb2.py | 190 -- .../grafeas_v1/proto/build_pb2_grpc.py | 2 - grafeas/grafeas/grafeas_v1/proto/common.proto | 103 - .../grafeas/grafeas_v1/proto/common_pb2.py | 286 -- .../grafeas_v1/proto/common_pb2_grpc.py | 2 - grafeas/grafeas/grafeas_v1/proto/cvss.proto | 85 - grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py | 521 --- .../grafeas/grafeas_v1/proto/cvss_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/deployment.proto | 66 - .../grafeas_v1/proto/deployment_pb2.py | 317 -- .../grafeas_v1/proto/deployment_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/discovery.proto | 83 - .../grafeas/grafeas_v1/proto/discovery_pb2.py 
| 342 -- .../grafeas_v1/proto/discovery_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/grafeas.proto | 531 ---- .../grafeas/grafeas_v1/proto/grafeas_pb2.py | 2810 ----------------- .../grafeas_v1/proto/grafeas_pb2_grpc.py | 302 -- grafeas/grafeas/grafeas_v1/proto/image.proto | 83 - grafeas/grafeas/grafeas_v1/proto/image_pb2.py | 428 --- .../grafeas_v1/proto/image_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/package.proto | 124 - .../grafeas/grafeas_v1/proto/package_pb2.py | 674 ---- .../grafeas_v1/proto/package_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/provenance.proto | 265 -- .../grafeas_v1/proto/provenance_pb2.py | 1893 ----------- .../grafeas_v1/proto/provenance_pb2_grpc.py | 2 - .../grafeas/grafeas_v1/proto/upgrade.proto | 114 - .../grafeas/grafeas_v1/proto/upgrade_pb2.py | 770 ----- .../grafeas_v1/proto/upgrade_pb2_grpc.py | 2 - .../grafeas_v1/proto/vulnerability.proto | 226 -- .../grafeas_v1/proto/vulnerability_pb2.py | 1212 ------- .../proto/vulnerability_pb2_grpc.py | 2 - grafeas/grafeas/grafeas_v1/types.py | 78 - grafeas/noxfile.py | 160 - grafeas/setup.cfg | 3 - grafeas/setup.py | 73 - grafeas/synth.metadata | 249 -- grafeas/synth.py | 358 --- .../unit/gapic/v1/test_grafeas_client_v1.py | 842 ----- 74 files changed, 1 insertion(+), 16548 deletions(-) delete mode 100644 .kokoro/continuous/grafeas.cfg delete mode 100644 .kokoro/docs/grafeas.cfg delete mode 100644 .kokoro/presubmit/grafeas.cfg delete mode 100644 .kokoro/release/grafeas.cfg delete mode 100644 grafeas/.coveragerc delete mode 100644 grafeas/.flake8 delete mode 100644 grafeas/.repo-metadata.json delete mode 100644 grafeas/CHANGELOG.md delete mode 100644 grafeas/LICENSE delete mode 100644 grafeas/MANIFEST.in delete mode 100644 grafeas/README.rst delete mode 120000 grafeas/docs/README.rst delete mode 100644 grafeas/docs/_static/custom.css delete mode 100644 grafeas/docs/_templates/layout.html delete mode 120000 grafeas/docs/changelog.md delete mode 100644 grafeas/docs/conf.py delete mode 100644 grafeas/docs/gapic/v1/api.rst delete mode 100644 grafeas/docs/gapic/v1/types.rst delete mode 100644 grafeas/docs/index.rst delete mode 100644 grafeas/grafeas.py delete mode 100644 grafeas/grafeas/__init__.py delete mode 100644 grafeas/grafeas/grafeas.py delete mode 100644 grafeas/grafeas/grafeas_v1/__init__.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/__init__.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/enums.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py delete mode 100644 grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/__init__.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/attestation.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/build.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/build_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/common.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/common_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/cvss.proto delete mode 100644 
grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/deployment.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/discovery.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/grafeas.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/image.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/image_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/package.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/package_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/provenance.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/upgrade.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/upgrade_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/vulnerability.proto delete mode 100644 grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py delete mode 100644 grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py delete mode 100644 grafeas/grafeas/grafeas_v1/types.py delete mode 100644 grafeas/noxfile.py delete mode 100644 grafeas/setup.cfg delete mode 100644 grafeas/setup.py delete mode 100644 grafeas/synth.metadata delete mode 100644 grafeas/synth.py delete mode 100644 grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py diff --git a/.kokoro/continuous/grafeas.cfg b/.kokoro/continuous/grafeas.cfg deleted file mode 100644 index 8f0e2f271518..000000000000 --- a/.kokoro/continuous/grafeas.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "grafeas" -} diff --git a/.kokoro/docs/grafeas.cfg b/.kokoro/docs/grafeas.cfg deleted file mode 100644 index 8f0e2f271518..000000000000 --- a/.kokoro/docs/grafeas.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "grafeas" -} diff --git a/.kokoro/presubmit/grafeas.cfg b/.kokoro/presubmit/grafeas.cfg deleted file mode 100644 index 8f0e2f271518..000000000000 --- a/.kokoro/presubmit/grafeas.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. -env_vars: { - key: "PACKAGE" - value: "grafeas" -} diff --git a/.kokoro/release/grafeas.cfg b/.kokoro/release/grafeas.cfg deleted file mode 100644 index 8f0e2f271518..000000000000 --- a/.kokoro/release/grafeas.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Tell the trampoline which build file to use. 
-env_vars: { - key: "PACKAGE" - value: "grafeas" -} diff --git a/README.rst b/README.rst index 8c767a12493c..8614d07d89a4 100644 --- a/README.rst +++ b/README.rst @@ -260,7 +260,7 @@ The following client libraries have **alpha** support: .. _Trace Documentation: https://googleapis.dev/python/cloudtrace/latest .. _Grafeas: https://pypi.org/project/grafeas/ -.. _Grafeas README: https://github.com/googleapis/google-cloud-python/tree/master/grafeas +.. _Grafeas README: https://github.com/googleapis/python-grafeas#python-client-for-grafeas-api-alpha .. _Grafeas Documentation: https://googleapis.dev/python/grafeas/latest .. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ diff --git a/grafeas/.coveragerc b/grafeas/.coveragerc deleted file mode 100644 index b178b094aa1d..000000000000 --- a/grafeas/.coveragerc +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[run] -branch = True - -[report] -fail_under = 100 -show_missing = True -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/grafeas/.flake8 b/grafeas/.flake8 deleted file mode 100644 index 0268ecc9c55c..000000000000 --- a/grafeas/.flake8 +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - *_pb2.py - - # Standard linting exemptions. - __pycache__, - .git, - *.pyc, - conf.py diff --git a/grafeas/.repo-metadata.json b/grafeas/.repo-metadata.json deleted file mode 100644 index 3be697127145..000000000000 --- a/grafeas/.repo-metadata.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "grafeas", - "name_pretty": "Grafeas", - "product_documentation": "https://grafeas.io", - "client_documentation": "https://googleapis.dev/python/grafeas/latest", - "issue_tracker": "", - "release_level": "alpha", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "grafeas", - "api_id": "", - "requires_billing": false -} \ No newline at end of file diff --git a/grafeas/CHANGELOG.md b/grafeas/CHANGELOG.md deleted file mode 100644 index b66a945bfffe..000000000000 --- a/grafeas/CHANGELOG.md +++ /dev/null @@ -1,47 +0,0 @@ -# Changelog - -[PyPI History][1] - -[1]: https://pypi.org/project/grafeas/#history - -## 0.3.0 - -10-10-2019 11:28 PDT - - -### Implementation Changes -- Remove send / receive message size limit (via synth). ([#8981](https://github.com/googleapis/google-cloud-python/pull/8981)) - -### Dependencies -- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) - -### Documentation -- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) -- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) -- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) -- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) - -## 0.2.0 - -07-12-2019 17:04 PDT - - -### Implementation Changes -- replace `min_affected_version` w/ `affected_version_{start,end}` (via synth). 
([#8465](https://github.com/googleapis/google-cloud-python/pull/8465)) -- Allow kwargs to be passed to create_channel, update templates (via synth). ([#8391](https://github.com/googleapis/google-cloud-python/pull/8391)) - -### New Features -- Update list method docstrings (via synth). ([#8510](https://github.com/googleapis/google-cloud-python/pull/8510)) - -### Documentation -- Update READMEs. ([#8456](https://github.com/googleapis/google-cloud-python/pull/8456)) - -### Internal / Testing Changes -- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) - -## 0.1.0 - -06-17-2019 10:44 PDT - -### New Features -- Initial release of the Grafeas client library. ([#8186](https://github.com/googleapis/google-cloud-python/pull/8186)) diff --git a/grafeas/LICENSE b/grafeas/LICENSE deleted file mode 100644 index a8ee855de2aa..000000000000 --- a/grafeas/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/grafeas/MANIFEST.in b/grafeas/MANIFEST.in deleted file mode 100644 index 9cbf175afe6b..000000000000 --- a/grafeas/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/grafeas/README.rst b/grafeas/README.rst deleted file mode 100644 index b8997cffaf93..000000000000 --- a/grafeas/README.rst +++ /dev/null @@ -1,61 +0,0 @@ -Python Client for Grafeas API (`Alpha`_) -=================================================== - -`Grafeas API`_: An implementation of the Grafeas API, which stores, and enables querying and -retrieval of, critical metadata about all of your software artifacts. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst -.. _Grafeas API: https://grafeas.io/ -.. _Client Library Documentation: https://googleapis.dev/python/grafeas/latest -.. _Product Documentation: https://grafeas.io/ - -Installation -------------- - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install grafeas - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install grafeas - -Next Steps -------------- - -- Read the `Client Library Documentation`_ for Grafeas API - to see other available methods on the client. -- Read the `Grafeas API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud - APIs that we cover. - -.. _Grafeas API Product documentation: https://grafeas.io/ -.. 
_repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/grafeas/docs/README.rst b/grafeas/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/grafeas/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/grafeas/docs/_static/custom.css b/grafeas/docs/_static/custom.css deleted file mode 100644 index 0abaf229fce3..000000000000 --- a/grafeas/docs/_static/custom.css +++ /dev/null @@ -1,4 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} \ No newline at end of file diff --git a/grafeas/docs/_templates/layout.html b/grafeas/docs/_templates/layout.html deleted file mode 100644 index 228529efe2d2..000000000000 --- a/grafeas/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
- <div class="body" role="main"> - <div class="admonition" id="python2-eol"> - On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please - visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. - </div> - {% block body %} {% endblock %} - </div>
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
- -{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/grafeas/docs/changelog.md b/grafeas/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/grafeas/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/grafeas/docs/conf.py b/grafeas/docs/conf.py deleted file mode 100644 index 89b4fe6b16e4..000000000000 --- a/grafeas/docs/conf.py +++ /dev/null @@ -1,349 +0,0 @@ -# -*- coding: utf-8 -*- -# -# grafeas documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"grafeas" -copyright = u"2017, Google" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files.
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "grafeas-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "grafeas.tex", u"grafeas Documentation", author, "manual") -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "grafeas", u"grafeas Documentation", [author], 1)] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "grafeas", - u"grafeas Documentation", - author, - "grafeas", - "GAPIC library for the {metadata.shortName} v1 service", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/grafeas/docs/gapic/v1/api.rst b/grafeas/docs/gapic/v1/api.rst deleted file mode 100644 index 66589f53952e..000000000000 --- a/grafeas/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Grafeas API -================================= - -.. automodule:: grafeas.grafeas_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/grafeas/docs/gapic/v1/types.rst b/grafeas/docs/gapic/v1/types.rst deleted file mode 100644 index ee1343181d2d..000000000000 --- a/grafeas/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Grafeas API Client -======================================= - -.. automodule:: grafeas.grafeas_v1.types - :members: \ No newline at end of file diff --git a/grafeas/docs/index.rst b/grafeas/docs/index.rst deleted file mode 100644 index 221c0e06399d..000000000000 --- a/grafeas/docs/index.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. include:: README.rst - -API Reference ------------- -.. toctree:: - :maxdepth: 2 - - gapic/v1/api - gapic/v1/types - -Changelog --------- - -For a list of all ``grafeas`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog \ No newline at end of file diff --git a/grafeas/grafeas.py b/grafeas/grafeas.py deleted file mode 100644 index b40dffd3313f..000000000000 --- a/grafeas/grafeas.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from grafeas_v1 import GrafeasClient -from grafeas_v1 import enums -from grafeas_v1 import types - - -__all__ = ( - 'enums', - 'types', - 'GrafeasClient', -) diff --git a/grafeas/grafeas/__init__.py b/grafeas/grafeas/__init__.py deleted file mode 100644 index 8fcc60e2b9c6..000000000000 --- a/grafeas/grafeas/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/grafeas/grafeas/grafeas.py b/grafeas/grafeas/grafeas.py deleted file mode 100644 index be586f6cfd64..000000000000 --- a/grafeas/grafeas/grafeas.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from grafeas.grafeas_v1 import GrafeasClient -from grafeas.grafeas_v1 import enums -from grafeas.grafeas_v1 import types - - -__all__ = ( - "enums", - "types", - "GrafeasClient", -) diff --git a/grafeas/grafeas/grafeas_v1/__init__.py b/grafeas/grafeas/grafeas_v1/__init__.py deleted file mode 100644 index 2e0f3ee626da..000000000000 --- a/grafeas/grafeas/grafeas_v1/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys -import warnings - -from grafeas.grafeas_v1 import types -from grafeas.grafeas_v1.gapic import enums -from grafeas.grafeas_v1.gapic import grafeas_client - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7." - "More details about Python 2 support for Google Cloud Client Libraries" - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class GrafeasClient(grafeas_client.GrafeasClient): - __doc__ = grafeas_client.GrafeasClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "GrafeasClient", -) diff --git a/grafeas/grafeas/grafeas_v1/gapic/__init__.py b/grafeas/grafeas/grafeas_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/grafeas/grafeas/grafeas_v1/gapic/enums.py b/grafeas/grafeas/grafeas_v1/gapic/enums.py deleted file mode 100644 index 59dd0c26cea4..000000000000 --- a/grafeas/grafeas/grafeas_v1/gapic/enums.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Architecture(enum.IntEnum): - """ - Instruction set architectures supported by various package managers. - - Attributes: - ARCHITECTURE_UNSPECIFIED (int): Unknown architecture. - X86 (int): X86 architecture. - X64 (int): X64 architecture. - """ - - ARCHITECTURE_UNSPECIFIED = 0 - X86 = 1 - X64 = 2 - - -class NoteKind(enum.IntEnum): - """ - Kind represents the kinds of notes supported. - - Attributes: - NOTE_KIND_UNSPECIFIED (int): Unknown. - VULNERABILITY (int): The note and occurrence represent a package vulnerability. - BUILD (int): The note and occurrence assert build provenance. - IMAGE (int): This represents an image basis relationship. - PACKAGE (int): This represents a package installed via a package manager. - DEPLOYMENT (int): The note and occurrence track deployment events. - DISCOVERY (int): The note and occurrence track the initial discovery status of a resource. - ATTESTATION (int): This represents a logical "role" that can attest to artifacts. - UPGRADE (int): This represents an available package upgrade. - """ - - NOTE_KIND_UNSPECIFIED = 0 - VULNERABILITY = 1 - BUILD = 2 - IMAGE = 3 - PACKAGE = 4 - DEPLOYMENT = 5 - DISCOVERY = 6 - ATTESTATION = 7 - UPGRADE = 8 - - -class Severity(enum.IntEnum): - """ - Note provider assigned severity/impact ranking. - - Attributes: - SEVERITY_UNSPECIFIED (int): Unknown. - MINIMAL (int): Minimal severity. - LOW (int): Low severity. - MEDIUM (int): Medium severity. - HIGH (int): High severity. - CRITICAL (int): Critical severity. - """ - - SEVERITY_UNSPECIFIED = 0 - MINIMAL = 1 - LOW = 2 - MEDIUM = 3 - HIGH = 4 - CRITICAL = 5 - - -class AliasContext(object): - class Kind(enum.IntEnum): - """ - The type of an alias. - - Attributes: - KIND_UNSPECIFIED (int): Unknown. 
- FIXED (int): Git tag. - MOVABLE (int): Git branch. - OTHER (int): Used to specify non-standard aliases. For example, if a Git repo has a - ref named "refs/foo/bar". - """ - - KIND_UNSPECIFIED = 0 - FIXED = 1 - MOVABLE = 2 - OTHER = 4 - - -class CVSSv3(object): - class AttackComplexity(enum.IntEnum): - """ - Attributes: - ATTACK_COMPLEXITY_UNSPECIFIED (int) - ATTACK_COMPLEXITY_LOW (int) - ATTACK_COMPLEXITY_HIGH (int) - """ - - ATTACK_COMPLEXITY_UNSPECIFIED = 0 - ATTACK_COMPLEXITY_LOW = 1 - ATTACK_COMPLEXITY_HIGH = 2 - - class AttackVector(enum.IntEnum): - """ - Attributes: - ATTACK_VECTOR_UNSPECIFIED (int) - ATTACK_VECTOR_NETWORK (int) - ATTACK_VECTOR_ADJACENT (int) - ATTACK_VECTOR_LOCAL (int) - ATTACK_VECTOR_PHYSICAL (int) - """ - - ATTACK_VECTOR_UNSPECIFIED = 0 - ATTACK_VECTOR_NETWORK = 1 - ATTACK_VECTOR_ADJACENT = 2 - ATTACK_VECTOR_LOCAL = 3 - ATTACK_VECTOR_PHYSICAL = 4 - - class Impact(enum.IntEnum): - """ - Attributes: - IMPACT_UNSPECIFIED (int) - IMPACT_HIGH (int) - IMPACT_LOW (int) - IMPACT_NONE (int) - """ - - IMPACT_UNSPECIFIED = 0 - IMPACT_HIGH = 1 - IMPACT_LOW = 2 - IMPACT_NONE = 3 - - class PrivilegesRequired(enum.IntEnum): - """ - Attributes: - PRIVILEGES_REQUIRED_UNSPECIFIED (int) - PRIVILEGES_REQUIRED_NONE (int) - PRIVILEGES_REQUIRED_LOW (int) - PRIVILEGES_REQUIRED_HIGH (int) - """ - - PRIVILEGES_REQUIRED_UNSPECIFIED = 0 - PRIVILEGES_REQUIRED_NONE = 1 - PRIVILEGES_REQUIRED_LOW = 2 - PRIVILEGES_REQUIRED_HIGH = 3 - - class Scope(enum.IntEnum): - """ - Attributes: - SCOPE_UNSPECIFIED (int) - SCOPE_UNCHANGED (int) - SCOPE_CHANGED (int) - """ - - SCOPE_UNSPECIFIED = 0 - SCOPE_UNCHANGED = 1 - SCOPE_CHANGED = 2 - - class UserInteraction(enum.IntEnum): - """ - Attributes: - USER_INTERACTION_UNSPECIFIED (int) - USER_INTERACTION_NONE (int) - USER_INTERACTION_REQUIRED (int) - """ - - USER_INTERACTION_UNSPECIFIED = 0 - USER_INTERACTION_NONE = 1 - USER_INTERACTION_REQUIRED = 2 - - -class DeploymentOccurrence(object): - class Platform(enum.IntEnum): - """ - Types of platforms. - - Attributes: - PLATFORM_UNSPECIFIED (int): Unknown. - GKE (int): Google Container Engine. - FLEX (int): Google App Engine: Flexible Environment. - CUSTOM (int): Custom user-defined platform. - """ - - PLATFORM_UNSPECIFIED = 0 - GKE = 1 - FLEX = 2 - CUSTOM = 3 - - -class DiscoveryOccurrence(object): - class AnalysisStatus(enum.IntEnum): - """ - Analysis status for a resource. Currently for initial analysis only (not - updated in continuous analysis). - - Attributes: - ANALYSIS_STATUS_UNSPECIFIED (int): Unknown. - PENDING (int): Resource is known but no action has been taken yet. - SCANNING (int): Resource is being analyzed. - FINISHED_SUCCESS (int): Analysis has finished successfully. - FINISHED_FAILED (int): Analysis has finished unsuccessfully, the analysis itself is in a bad - state. - FINISHED_UNSUPPORTED (int): The resource is known not to be supported - """ - - ANALYSIS_STATUS_UNSPECIFIED = 0 - PENDING = 1 - SCANNING = 2 - FINISHED_SUCCESS = 3 - FINISHED_FAILED = 4 - FINISHED_UNSUPPORTED = 5 - - class ContinuousAnalysis(enum.IntEnum): - """ - Whether the resource is continuously analyzed. - - Attributes: - CONTINUOUS_ANALYSIS_UNSPECIFIED (int): Unknown. - ACTIVE (int): The resource is continuously analyzed. - INACTIVE (int): The resource is ignored for continuous analysis. - """ - - CONTINUOUS_ANALYSIS_UNSPECIFIED = 0 - ACTIVE = 1 - INACTIVE = 2 - - -class Version(object): - class VersionKind(enum.IntEnum): - """ - Whether this is an ordinary package version or a sentinel MIN/MAX version. 
- - Attributes: - VERSION_KIND_UNSPECIFIED (int): Unknown. - NORMAL (int): A standard package version. - MINIMUM (int): A special version representing negative infinity. - MAXIMUM (int): A special version representing positive infinity. - """ - - VERSION_KIND_UNSPECIFIED = 0 - NORMAL = 1 - MINIMUM = 2 - MAXIMUM = 3 diff --git a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py deleted file mode 100644 index cfda1db0881a..000000000000 --- a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py +++ /dev/null @@ -1,1363 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the grafeas.v1 Grafeas API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from grafeas.grafeas_v1.gapic import enums -from grafeas.grafeas_v1.gapic import grafeas_client_config -from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport -from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.proto import grafeas_pb2_grpc - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("grafeas",).version - - -class GrafeasClient(object): - """ - `Grafeas `__ API. - - Retrieves analysis results of Cloud components such as Docker container - images. - - Analysis results are stored as a series of occurrences. An - ``Occurrence`` contains information about a specific analysis instance - on a resource. An occurrence refers to a ``Note``. A note contains - details describing the analysis and is generally stored in a separate - project, called a ``Provider``. Multiple occurrences can refer to the - same note. - - For example, an SSL vulnerability could affect multiple images. In this - case, there would be one note for the vulnerability and an occurrence - for each image with the vulnerability referring to that note. - """ - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "grafeas.v1.Grafeas" - - @classmethod - def note_path(cls, project, note): - """Return a fully-qualified note string.""" - return google.api_core.path_template.expand( - "projects/{project}/notes/{note}", project=project, note=note, - ) - - @classmethod - def occurrence_path(cls, project, occurrence): - """Return a fully-qualified occurrence string.""" - return google.api_core.path_template.expand( - "projects/{project}/occurrences/{occurrence}", - project=project, - occurrence=occurrence, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__(self, transport, client_config=None, client_info=None): - """Constructor. - - Args: - transport (~.GrafeasGrpcTransport): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = grafeas_client_config.config - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - self.transport = transport - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def get_occurrence( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the specified occurrence. 
- - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.occurrence_path('[PROJECT]', '[OCCURRENCE]') - >>> - >>> response = client.get_occurrence(name) - - Args: - name (str): The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Occurrence` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_occurrence" not in self._inner_api_calls: - self._inner_api_calls[ - "get_occurrence" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_occurrence, - default_retry=self._method_configs["GetOccurrence"].retry, - default_timeout=self._method_configs["GetOccurrence"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.GetOccurrenceRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_occurrence"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_occurrences( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists occurrences for the specified project. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_occurrences(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_occurrences(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): The name of the project to list occurrences for in the form of - ``projects/[PROJECT_ID]``. - filter_ (str): The filter expression. - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~grafeas.grafeas_v1.types.Occurrence` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_occurrences" not in self._inner_api_calls: - self._inner_api_calls[ - "list_occurrences" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_occurrences, - default_retry=self._method_configs["ListOccurrences"].retry, - default_timeout=self._method_configs["ListOccurrences"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.ListOccurrencesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_occurrences"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="occurrences", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_occurrence( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the specified occurrence. For example, use this method to delete an - occurrence when the occurrence is no longer applicable for the given - resource. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.occurrence_path('[PROJECT]', '[OCCURRENCE]') - >>> - >>> client.delete_occurrence(name) - - Args: - name (str): The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_occurrence" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_occurrence" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_occurrence, - default_retry=self._method_configs["DeleteOccurrence"].retry, - default_timeout=self._method_configs["DeleteOccurrence"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.DeleteOccurrenceRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_occurrence"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_occurrence( - self, - parent, - occurrence, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new occurrence. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `occurrence`: - >>> occurrence = {} - >>> - >>> response = client.create_occurrence(parent, occurrence) - - Args: - parent (str): The name of the project in the form of ``projects/[PROJECT_ID]``, under - which the occurrence is to be created. - occurrence (Union[dict, ~grafeas.grafeas_v1.types.Occurrence]): The occurrence to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Occurrence` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Occurrence` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_occurrence" not in self._inner_api_calls: - self._inner_api_calls[ - "create_occurrence" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_occurrence, - default_retry=self._method_configs["CreateOccurrence"].retry, - default_timeout=self._method_configs["CreateOccurrence"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.CreateOccurrenceRequest( - parent=parent, occurrence=occurrence, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_occurrence"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def batch_create_occurrences( - self, - parent, - occurrences, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates new occurrences in batch. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `occurrences`: - >>> occurrences = [] - >>> - >>> response = client.batch_create_occurrences(parent, occurrences) - - Args: - parent (str): The name of the project in the form of ``projects/[PROJECT_ID]``, under - which the occurrences are to be created. - occurrences (list[Union[dict, ~grafeas.grafeas_v1.types.Occurrence]]): The occurrences to create. Max allowed length is 1000. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Occurrence` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.BatchCreateOccurrencesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
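# In each method body, the `routing_header` block converts the resource
# name into an `x-goog-request-params` gRPC metadata entry via
# `routing_header.to_grpc_metadata`, which lets the backend route the
# request by resource. The `try/except AttributeError` is defensive
# generator boilerplate; with a plain string argument it never fires.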
- if "batch_create_occurrences" not in self._inner_api_calls: - self._inner_api_calls[ - "batch_create_occurrences" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.batch_create_occurrences, - default_retry=self._method_configs["BatchCreateOccurrences"].retry, - default_timeout=self._method_configs["BatchCreateOccurrences"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.BatchCreateOccurrencesRequest( - parent=parent, occurrences=occurrences, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["batch_create_occurrences"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_occurrence( - self, - name, - occurrence, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the specified occurrence. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.occurrence_path('[PROJECT]', '[OCCURRENCE]') - >>> - >>> # TODO: Initialize `occurrence`: - >>> occurrence = {} - >>> - >>> response = client.update_occurrence(name, occurrence) - - Args: - name (str): The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - occurrence (Union[dict, ~grafeas.grafeas_v1.types.Occurrence]): The updated occurrence. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Occurrence` - update_mask (Union[dict, ~grafeas.grafeas_v1.types.FieldMask]): The fields to update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Occurrence` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
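# `update_mask` limits which fields of the stored occurrence the server
# overwrites. A plain dict is accepted wherever a FieldMask message is
# expected, so a caller could restrict an update to a single field like so
# (an illustrative, hypothetical call):
#
#     client.update_occurrence(
#         name, occurrence, update_mask={"paths": ["remediation"]}
#     )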
- if "update_occurrence" not in self._inner_api_calls: - self._inner_api_calls[ - "update_occurrence" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_occurrence, - default_retry=self._method_configs["UpdateOccurrence"].retry, - default_timeout=self._method_configs["UpdateOccurrence"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.UpdateOccurrenceRequest( - name=name, occurrence=occurrence, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_occurrence"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_occurrence_note( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the note attached to the specified occurrence. Consumer projects can - use this method to get a note that belongs to a provider project. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.occurrence_path('[PROJECT]', '[OCCURRENCE]') - >>> - >>> response = client.get_occurrence_note(name) - - Args: - name (str): The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Note` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
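# Per the defaults in grafeas_client_config.py, read-style RPCs such as
# GetOccurrenceNote are marked idempotent and are retried on
# DEADLINE_EXCEEDED and UNAVAILABLE with exponential backoff: an initial
# 100 ms delay growing by a factor of 1.3 up to a 60 s cap, within a total
# budget of 600 s.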
- if "get_occurrence_note" not in self._inner_api_calls: - self._inner_api_calls[ - "get_occurrence_note" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_occurrence_note, - default_retry=self._method_configs["GetOccurrenceNote"].retry, - default_timeout=self._method_configs["GetOccurrenceNote"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.GetOccurrenceNoteRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_occurrence_note"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_note( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the specified note. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.note_path('[PROJECT]', '[NOTE]') - >>> - >>> response = client.get_note(name) - - Args: - name (str): The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Note` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_note" not in self._inner_api_calls: - self._inner_api_calls[ - "get_note" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_note, - default_retry=self._method_configs["GetNote"].retry, - default_timeout=self._method_configs["GetNote"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.GetNoteRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_note"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_notes( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists notes for the specified project. 
- - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_notes(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_notes(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): The name of the project to list notes for in the form of - ``projects/[PROJECT_ID]``. - filter_ (str): The filter expression. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~grafeas.grafeas_v1.types.Note` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_notes" not in self._inner_api_calls: - self._inner_api_calls[ - "list_notes" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_notes, - default_retry=self._method_configs["ListNotes"].retry, - default_timeout=self._method_configs["ListNotes"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.ListNotesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_notes"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="notes", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_note( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the specified note. 
- - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.note_path('[PROJECT]', '[NOTE]') - >>> - >>> client.delete_note(name) - - Args: - name (str): The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_note" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_note" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_note, - default_retry=self._method_configs["DeleteNote"].retry, - default_timeout=self._method_configs["DeleteNote"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.DeleteNoteRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_note"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_note( - self, - parent, - note_id, - note, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new note. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `note_id`: - >>> note_id = '' - >>> - >>> # TODO: Initialize `note`: - >>> note = {} - >>> - >>> response = client.create_note(parent, note_id, note) - - Args: - parent (str): The name of the project in the form of ``projects/[PROJECT_ID]``, under - which the note is to be created. - note_id (str): The ID to use for this note. - note (Union[dict, ~grafeas.grafeas_v1.types.Note]): The note to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Note` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Note` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_note" not in self._inner_api_calls: - self._inner_api_calls[ - "create_note" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_note, - default_retry=self._method_configs["CreateNote"].retry, - default_timeout=self._method_configs["CreateNote"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.CreateNoteRequest( - parent=parent, note_id=note_id, note=note, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_note"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def batch_create_notes( - self, - parent, - notes, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates new notes in batch. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `notes`: - >>> notes = {} - >>> - >>> response = client.batch_create_notes(parent, notes) - - Args: - parent (str): The name of the project in the form of ``projects/[PROJECT_ID]``, under - which the notes are to be created. - notes (dict[str -> Union[dict, ~grafeas.grafeas_v1.types.Note]]): The notes to create. Max allowed length is 1000. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Note` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.BatchCreateNotesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
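# Unlike `batch_create_occurrences`, which takes a list, `notes` is a
# mapping from note ID to Note, so one call creates each note under its
# own ID. A hypothetical, illustrative call (the note ID and contents are
# placeholders):
#
#     client.batch_create_notes(
#         parent,
#         {"qa-authority": {"attestation": {"hint": {"human_readable_name": "qa"}}}},
#     )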
- if "batch_create_notes" not in self._inner_api_calls: - self._inner_api_calls[ - "batch_create_notes" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.batch_create_notes, - default_retry=self._method_configs["BatchCreateNotes"].retry, - default_timeout=self._method_configs["BatchCreateNotes"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.BatchCreateNotesRequest(parent=parent, notes=notes,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["batch_create_notes"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_note( - self, - name, - note, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the specified note. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.note_path('[PROJECT]', '[NOTE]') - >>> - >>> # TODO: Initialize `note`: - >>> note = {} - >>> - >>> response = client.update_note(name, note) - - Args: - name (str): The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - note (Union[dict, ~grafeas.grafeas_v1.types.Note]): The updated note. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.Note` - update_mask (Union[dict, ~grafeas.grafeas_v1.types.FieldMask]): The fields to update. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~grafeas.grafeas_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~grafeas.grafeas_v1.types.Note` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
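# Like the other mutating RPCs, UpdateNote is configured as non-idempotent
# in grafeas_client_config.py, so it is not retried by default. A caller
# that knows a given update is safe to repeat can opt in explicitly, for
# example (illustrative):
#
#     from google.api_core import retry as retries
#     client.update_note(name, note, retry=retries.Retry(deadline=60))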
- if "update_note" not in self._inner_api_calls: - self._inner_api_calls[ - "update_note" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_note, - default_retry=self._method_configs["UpdateNote"].retry, - default_timeout=self._method_configs["UpdateNote"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.UpdateNoteRequest( - name=name, note=note, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_note"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_note_occurrences( - self, - name, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists occurrences referencing the specified note. Provider projects can use - this method to get all occurrences across consumer projects referencing the - specified note. - - Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport) - >>> - >>> name = client.note_path('[PROJECT]', '[NOTE]') - >>> - >>> # Iterate over all results - >>> for element in client.list_note_occurrences(name): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_note_occurrences(name).pages: - ... for element in page: - ... # process element - ... pass - - Args: - name (str): The name of the note to list occurrences for in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - filter_ (str): The filter expression. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~grafeas.grafeas_v1.types.Occurrence` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
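# The list-style methods all return a GRPCIterator configured with three
# field names: `items_field` (where the resources live in each response),
# plus `request_token_field` and `response_token_field` (the page-token
# handshake). The iterator re-invokes the wrapped call with `page_token`
# set to each `next_page_token` until the server returns an empty token,
# which is what makes both plain iteration and the `pages` property work.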
- if "list_note_occurrences" not in self._inner_api_calls: - self._inner_api_calls[ - "list_note_occurrences" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_note_occurrences, - default_retry=self._method_configs["ListNoteOccurrences"].retry, - default_timeout=self._method_configs["ListNoteOccurrences"].timeout, - client_info=self._client_info, - ) - - request = grafeas_pb2.ListNoteOccurrencesRequest( - name=name, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_note_occurrences"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="occurrences", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py deleted file mode 100644 index f899bf6ddd4b..000000000000 --- a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py +++ /dev/null @@ -1,93 +0,0 @@ -config = { - "interfaces": { - "grafeas.v1.Grafeas": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "GetOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListOccurrences": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "BatchCreateOccurrences": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetOccurrenceNote": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetNote": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListNotes": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteNote": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateNote": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "BatchCreateNotes": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateNote": { - "timeout_millis": 30000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListNoteOccurrences": { - "timeout_millis": 
30000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py b/grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py b/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py deleted file mode 100644 index 8a630ead26aa..000000000000 --- a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py +++ /dev/null @@ -1,291 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from grafeas.grafeas_v1.proto import grafeas_pb2_grpc - - -class GrafeasGrpcTransport(object): - """gRPC transport class providing stubs for - grafeas.v1 Grafeas API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - def __init__(self, address, scopes, channel=None, credentials=None): - """Instantiate the transport class. - - Args: - address (str): The address where the service is hosted. - scopes (Sequence[str]): The scopes needed to make gRPC calls. - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address, - scopes, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "grafeas_stub": grafeas_pb2_grpc.GrafeasStub(channel), - } - - @classmethod - def create_channel(cls, address, scopes, credentials=None, **kwargs): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - scopes (Sequence[str]): The scopes needed to make gRPC calls. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=scopes, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def get_occurrence(self): - """Return the gRPC stub for :meth:`GrafeasClient.get_occurrence`. - - Gets the specified occurrence. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].GetOccurrence - - @property - def list_occurrences(self): - """Return the gRPC stub for :meth:`GrafeasClient.list_occurrences`. - - Lists occurrences for the specified project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].ListOccurrences - - @property - def delete_occurrence(self): - """Return the gRPC stub for :meth:`GrafeasClient.delete_occurrence`. - - Deletes the specified occurrence. For example, use this method to delete an - occurrence when the occurrence is no longer applicable for the given - resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].DeleteOccurrence - - @property - def create_occurrence(self): - """Return the gRPC stub for :meth:`GrafeasClient.create_occurrence`. - - Creates a new occurrence. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].CreateOccurrence - - @property - def batch_create_occurrences(self): - """Return the gRPC stub for :meth:`GrafeasClient.batch_create_occurrences`. - - Creates new occurrences in batch. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].BatchCreateOccurrences - - @property - def update_occurrence(self): - """Return the gRPC stub for :meth:`GrafeasClient.update_occurrence`. - - Updates the specified occurrence. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].UpdateOccurrence - - @property - def get_occurrence_note(self): - """Return the gRPC stub for :meth:`GrafeasClient.get_occurrence_note`. - - Gets the note attached to the specified occurrence. Consumer projects can - use this method to get a note that belongs to a provider project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].GetOccurrenceNote - - @property - def get_note(self): - """Return the gRPC stub for :meth:`GrafeasClient.get_note`. - - Gets the specified note. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["grafeas_stub"].GetNote - - @property - def list_notes(self): - """Return the gRPC stub for :meth:`GrafeasClient.list_notes`. - - Lists notes for the specified project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].ListNotes - - @property - def delete_note(self): - """Return the gRPC stub for :meth:`GrafeasClient.delete_note`. - - Deletes the specified note. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].DeleteNote - - @property - def create_note(self): - """Return the gRPC stub for :meth:`GrafeasClient.create_note`. - - Creates a new note. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].CreateNote - - @property - def batch_create_notes(self): - """Return the gRPC stub for :meth:`GrafeasClient.batch_create_notes`. - - Creates new notes in batch. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].BatchCreateNotes - - @property - def update_note(self): - """Return the gRPC stub for :meth:`GrafeasClient.update_note`. - - Updates the specified note. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].UpdateNote - - @property - def list_note_occurrences(self): - """Return the gRPC stub for :meth:`GrafeasClient.list_note_occurrences`. - - Lists occurrences referencing the specified note. Provider projects can use - this method to get all occurrences across consumer projects referencing the - specified note. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["grafeas_stub"].ListNoteOccurrences diff --git a/grafeas/grafeas/grafeas_v1/proto/__init__.py b/grafeas/grafeas/grafeas_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/grafeas/grafeas/grafeas_v1/proto/attestation.proto b/grafeas/grafeas/grafeas_v1/proto/attestation.proto deleted file mode 100644 index 61423d3eb29e..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/attestation.proto +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-
-syntax = "proto3";
-
-package grafeas.v1;
-
-import "grafeas/v1/common.proto";
-
-option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas";
-option java_multiple_files = true;
-option java_package = "io.grafeas.v1";
-option objc_class_prefix = "GRA";
-
-// An attestation wrapper with a PGP-compatible signature. This message only
-// supports `ATTACHED` signatures, where the payload that is signed is included
-// alongside the signature itself in the same file.
-
-// Note kind that represents a logical attestation "role" or "authority". For
-// example, an organization might have one `Authority` for "QA" and one for
-// "build". This note is intended to act strictly as a grouping mechanism for
-// the attached occurrences (Attestations). This grouping mechanism also
-// provides a security boundary, since IAM ACLs gate the ability for a principal
-// to attach an occurrence to a given note. It also provides a single point of
-// lookup to find all attached attestation occurrences, even if they don't all
-// live in the same project.
-message AttestationNote {
-  // This submessage provides human-readable hints about the purpose of the
-  // authority. Because the name of a note acts as its resource reference, it is
-  // important to disambiguate the canonical name of the Note (which might be a
-  // UUID for security purposes) from "readable" names more suitable for debug
-  // output. Note that these hints should not be used to look up authorities in
-  // security sensitive contexts, such as when looking up attestations to
-  // verify.
-  message Hint {
-    // Required. The human readable name of this attestation authority, for
-    // example "qa".
-    string human_readable_name = 1;
-  }
-
-  // Hint hints at the purpose of the attestation authority.
-  Hint hint = 1;
-}
-
-// Occurrence that represents a single "attestation". The authenticity of an
-// attestation can be verified using the attached signature. If the verifier
-// trusts the public key of the signer, then verifying the signature is
-// sufficient to establish trust. In this circumstance, the authority to which
-// this attestation is attached is primarily useful for lookup (how to find
-// this attestation if you already know the authority and artifact to be
-// verified) and intent (for which authority this attestation was intended to
-// sign).
-message AttestationOccurrence {
-  // Required. The serialized payload that is verified by one or more
-  // `signatures`.
-  bytes serialized_payload = 1;
-  // One or more signatures over `serialized_payload`. Verifier implementations
-  // should consider this attestation message verified if at least one
-  // `signature` verifies `serialized_payload`. See `Signature` in common.proto
-  // for more details on signature structure and verification.
-  repeated Signature signatures = 2;
-}
diff --git a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py
deleted file mode 100644
index e390883e1707..000000000000
--- a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
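# A minimal usage sketch for the messages defined in attestation.proto
# above (a sketch, assuming the generated modules are importable as shown,
# mirroring the import style used in this file):
#
#     from grafeas.grafeas_v1.proto import attestation_pb2, common_pb2
#
#     occurrence = attestation_pb2.AttestationOccurrence(
#         serialized_payload=b"payload-bytes",
#         signatures=[
#             common_pb2.Signature(
#                 signature=b"raw-signature-bytes",
#                 public_key_id="openpgp4fpr:74FAF3B861BDA0870C7B6DEF607E48D2A663AEEA",
#             )
#         ],
#     )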
-# source: grafeas_v1/proto/attestation.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from grafeas.grafeas_v1.proto import common_pb2 as grafeas__v1_dot_proto_dot_common__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/attestation.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n"grafeas_v1/proto/attestation.proto\x12\ngrafeas.v1\x1a\x1dgrafeas_v1/proto/common.proto"f\n\x0f\x41ttestationNote\x12.\n\x04hint\x18\x01 \x01(\x0b\x32 .grafeas.v1.AttestationNote.Hint\x1a#\n\x04Hint\x12\x1b\n\x13human_readable_name\x18\x01 \x01(\t"^\n\x15\x41ttestationOccurrence\x12\x1a\n\x12serialized_payload\x18\x01 \x01(\x0c\x12)\n\nsignatures\x18\x02 \x03(\x0b\x32\x15.grafeas.v1.SignatureBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR,], -) - - -_ATTESTATIONNOTE_HINT = _descriptor.Descriptor( - name="Hint", - full_name="grafeas.v1.AttestationNote.Hint", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="human_readable_name", - full_name="grafeas.v1.AttestationNote.Hint.human_readable_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=148, - serialized_end=183, -) - -_ATTESTATIONNOTE = _descriptor.Descriptor( - name="AttestationNote", - full_name="grafeas.v1.AttestationNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hint", - full_name="grafeas.v1.AttestationNote.hint", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_ATTESTATIONNOTE_HINT,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=81, - serialized_end=183, -) - - -_ATTESTATIONOCCURRENCE = _descriptor.Descriptor( - name="AttestationOccurrence", - full_name="grafeas.v1.AttestationOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="serialized_payload", - full_name="grafeas.v1.AttestationOccurrence.serialized_payload", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, 
- serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="signatures", - full_name="grafeas.v1.AttestationOccurrence.signatures", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=185, - serialized_end=279, -) - -_ATTESTATIONNOTE_HINT.containing_type = _ATTESTATIONNOTE -_ATTESTATIONNOTE.fields_by_name["hint"].message_type = _ATTESTATIONNOTE_HINT -_ATTESTATIONOCCURRENCE.fields_by_name[ - "signatures" -].message_type = grafeas__v1_dot_proto_dot_common__pb2._SIGNATURE -DESCRIPTOR.message_types_by_name["AttestationNote"] = _ATTESTATIONNOTE -DESCRIPTOR.message_types_by_name["AttestationOccurrence"] = _ATTESTATIONOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AttestationNote = _reflection.GeneratedProtocolMessageType( - "AttestationNote", - (_message.Message,), - dict( - Hint=_reflection.GeneratedProtocolMessageType( - "Hint", - (_message.Message,), - dict( - DESCRIPTOR=_ATTESTATIONNOTE_HINT, - __module__="grafeas_v1.proto.attestation_pb2", - __doc__="""This submessage provides human-readable hints about the - purpose of the authority. Because the name of a note acts as its - resource reference, it is important to disambiguate the canonical name - of the Note (which might be a UUID for security purposes) from - "readable" names more suitable for debug output. Note that these hints - should not be used to look up authorities in security sensitive - contexts, such as when looking up attestations to verify. - - - Attributes: - human_readable_name: - Required. The human readable name of this attestation - authority, for example "qa". - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.AttestationNote.Hint) - ), - ), - DESCRIPTOR=_ATTESTATIONNOTE, - __module__="grafeas_v1.proto.attestation_pb2", - __doc__="""Note kind that represents a logical attestation "role" or "authority". - For example, an organization might have one ``Authority`` for "QA" and - one for "build". This note is intended to act strictly as a grouping - mechanism for the attached occurrences (Attestations). This grouping - mechanism also provides a security boundary, since IAM ACLs gate the - ability for a principle to attach an occurrence to a given note. It also - provides a single point of lookup to find all attached attestation - occurrences, even if they don't all live in the same project. - - - Attributes: - hint: - Hint hints at the purpose of the attestation authority. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.AttestationNote) - ), -) -_sym_db.RegisterMessage(AttestationNote) -_sym_db.RegisterMessage(AttestationNote.Hint) - -AttestationOccurrence = _reflection.GeneratedProtocolMessageType( - "AttestationOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_ATTESTATIONOCCURRENCE, - __module__="grafeas_v1.proto.attestation_pb2", - __doc__="""Occurrence that represents a single "attestation". The - authenticity of an attestation can be verified using the attached - signature. If the verifier trusts the public key of the signer, then - verifying the signature is sufficient to establish trust. 
In this circumstance, the authority to which this attestation is attached is
-    primarily useful for lookup (how to find this attestation if you already
-    know the authority and artifact to be verified) and intent (for which
-    authority this attestation was intended to sign).
-
-
-    Attributes:
-        serialized_payload:
-            Required. The serialized payload that is verified by one or
-            more ``signatures``.
-        signatures:
-            One or more signatures over ``serialized_payload``. Verifier
-            implementations should consider this attestation message
-            verified if at least one ``signature`` verifies
-            ``serialized_payload``. See ``Signature`` in common.proto for
-            more details on signature structure and verification.
-        """,
-        # @@protoc_insertion_point(class_scope:grafeas.v1.AttestationOccurrence)
-    ),
-)
-_sym_db.RegisterMessage(AttestationOccurrence)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03a9..000000000000
--- a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/grafeas/grafeas/grafeas_v1/proto/build.proto b/grafeas/grafeas/grafeas_v1/proto/build.proto
deleted file mode 100644
index c0e9c7566540..000000000000
--- a/grafeas/grafeas/grafeas_v1/proto/build.proto
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2019 The Grafeas Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package grafeas.v1;
-
-import "grafeas/v1/provenance.proto";
-
-option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas";
-option java_multiple_files = true;
-option java_package = "io.grafeas.v1";
-option objc_class_prefix = "GRA";
-
-// Note holding the version of the provider's builder and the signature of the
-// provenance message in the build details occurrence.
-message BuildNote {
-  // Required. Immutable. Version of the builder which produced this build.
-  string builder_version = 1;
-}
-
-// Details of a build occurrence.
-message BuildOccurrence {
-  // Required. The actual provenance for the build.
-  grafeas.v1.BuildProvenance provenance = 1;
-
-  // Serialized JSON representation of the provenance, used in generating the
-  // build signature in the corresponding build note. After verifying the
-  // signature, `provenance_bytes` can be unmarshalled and compared to the
-  // provenance to confirm that it is unchanged. A base64-encoded string
-  // representation of the provenance bytes is used for the signature in order
-  // to interoperate with openssl, which expects this format for signature
-  // verification.
-  //
-  // The serialized form is captured both to avoid ambiguity in how the
-  // provenance is marshalled to json as well as to prevent incompatibilities
-  // with future changes.
- string provenance_bytes = 2; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py b/grafeas/grafeas/grafeas_v1/proto/build_pb2.py deleted file mode 100644 index da4fb11156b3..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas_v1/proto/build.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from grafeas.grafeas_v1.proto import ( - provenance_pb2 as grafeas__v1_dot_proto_dot_provenance__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/build.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1cgrafeas_v1/proto/build.proto\x12\ngrafeas.v1\x1a!grafeas_v1/proto/provenance.proto"$\n\tBuildNote\x12\x17\n\x0f\x62uilder_version\x18\x01 \x01(\t"\\\n\x0f\x42uildOccurrence\x12/\n\nprovenance\x18\x01 \x01(\x0b\x32\x1b.grafeas.v1.BuildProvenance\x12\x18\n\x10provenance_bytes\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[grafeas__v1_dot_proto_dot_provenance__pb2.DESCRIPTOR,], -) - - -_BUILDNOTE = _descriptor.Descriptor( - name="BuildNote", - full_name="grafeas.v1.BuildNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="builder_version", - full_name="grafeas.v1.BuildNote.builder_version", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=79, - serialized_end=115, -) - - -_BUILDOCCURRENCE = _descriptor.Descriptor( - name="BuildOccurrence", - full_name="grafeas.v1.BuildOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="provenance", - full_name="grafeas.v1.BuildOccurrence.provenance", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="provenance_bytes", - full_name="grafeas.v1.BuildOccurrence.provenance_bytes", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=117, - serialized_end=209, -) - -_BUILDOCCURRENCE.fields_by_name[ - "provenance" -].message_type = grafeas__v1_dot_proto_dot_provenance__pb2._BUILDPROVENANCE -DESCRIPTOR.message_types_by_name["BuildNote"] = _BUILDNOTE -DESCRIPTOR.message_types_by_name["BuildOccurrence"] = _BUILDOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -BuildNote = _reflection.GeneratedProtocolMessageType( - "BuildNote", - (_message.Message,), - dict( - DESCRIPTOR=_BUILDNOTE, - __module__="grafeas_v1.proto.build_pb2", - __doc__="""Note holding the version of the provider's builder and the - signature of the provenance message in the build details occurrence. - - - Attributes: - builder_version: - Required. Immutable. Version of the builder which produced - this build. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BuildNote) - ), -) -_sym_db.RegisterMessage(BuildNote) - -BuildOccurrence = _reflection.GeneratedProtocolMessageType( - "BuildOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_BUILDOCCURRENCE, - __module__="grafeas_v1.proto.build_pb2", - __doc__="""Details of a build occurrence. - - - Attributes: - provenance: - Required. The actual provenance for the build. - provenance_bytes: - Serialized JSON representation of the provenance, used in - generating the build signature in the corresponding build - note. After verifying the signature, ``provenance_bytes`` can - be unmarshalled and compared to the provenance to confirm that - it is unchanged. A base64-encoded string representation of the - provenance bytes is used for the signature in order to - interoperate with openssl which expects this format for - signature verification. The serialized form is captured both - to avoid ambiguity in how the provenance is marshalled to json - as well as to prevent incompatibilities with future changes. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BuildOccurrence) - ), -) -_sym_db.RegisterMessage(BuildOccurrence) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/common.proto b/grafeas/grafeas/grafeas_v1/proto/common.proto deleted file mode 100644 index db780bb629c3..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/common.proto +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
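The ``provenance_bytes`` contract described in build.proto above maps onto a short verification routine. A minimal sketch, assuming the field carries the base64-encoded JSON form that was signed, and that ``verify_fn`` is a caller-supplied callback wrapping the actual openssl-style signature check (both ``check_provenance`` and ``verify_fn`` are illustrative names, not part of Grafeas):

import base64
import json

from google.protobuf import json_format


def check_provenance(build_occurrence, verify_fn):
    """Verify the signed provenance, then confirm it matches the structured field."""
    # The signature was produced over the base64 string itself, so verify
    # against the raw field bytes before decoding anything.
    raw = build_occurrence.provenance_bytes.encode("ascii")
    if not verify_fn(raw):
        return False
    # Unmarshal the signed bytes and compare them with the structured
    # `provenance` message; comparing parsed dicts sidesteps key-order and
    # whitespace differences in the JSON serialization.
    signed = json.loads(base64.b64decode(raw))
    current = json.loads(json_format.MessageToJson(build_occurrence.provenance))
    return signed == current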
- -syntax = "proto3"; - -package grafeas.v1; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// Kind represents the kinds of notes supported. -enum NoteKind { - // Unknown. - NOTE_KIND_UNSPECIFIED = 0; - // The note and occurrence represent a package vulnerability. - VULNERABILITY = 1; - // The note and occurrence assert build provenance. - BUILD = 2; - // This represents an image basis relationship. - IMAGE = 3; - // This represents a package installed via a package manager. - PACKAGE = 4; - // The note and occurrence track deployment events. - DEPLOYMENT = 5; - // The note and occurrence track the initial discovery status of a resource. - DISCOVERY = 6; - // This represents a logical "role" that can attest to artifacts. - ATTESTATION = 7; - // This represents an available package upgrade. - UPGRADE = 8; -} - -// Metadata for any related URL information. -message RelatedUrl { - // Specific URL associated with the resource. - string url = 1; - // Label to describe usage of the URL. - string label = 2; -} - -// Verifiers (e.g. Kritis implementations) MUST verify signatures -// with respect to the trust anchors defined in policy (e.g. a Kritis policy). -// Typically this means that the verifier has been configured with a map from -// `public_key_id` to public key material (and any required parameters, e.g. -// signing algorithm). -// -// In particular, verification implementations MUST NOT treat the signature -// `public_key_id` as anything more than a key lookup hint. The `public_key_id` -// DOES NOT validate or authenticate a public key; it only provides a mechanism -// for quickly selecting a public key ALREADY CONFIGURED on the verifier through -// a trusted channel. Verification implementations MUST reject signatures in any -// of the following circumstances: -// * The `public_key_id` is not recognized by the verifier. -// * The public key that `public_key_id` refers to does not verify the -// signature with respect to the payload. -// -// The `signature` contents SHOULD NOT be "attached" (where the payload is -// included with the serialized `signature` bytes). Verifiers MUST ignore any -// "attached" payload and only verify signatures with respect to explicitly -// provided payload (e.g. a `payload` field on the proto message that holds -// this Signature, or the canonical serialization of the proto message that -// holds this signature). -message Signature { - // The content of the signature, an opaque bytestring. - // The payload that this signature verifies MUST be unambiguously provided - // with the Signature during verification. A wrapper message might provide - // the payload explicitly. Alternatively, a message might have a canonical - // serialization that can always be unambiguously computed to derive the - // payload. - bytes signature = 1; - - // The identifier for the public key that verifies this signature. - // * The `public_key_id` is required. - // * The `public_key_id` MUST be an RFC3986 conformant URI. - // * When possible, the `public_key_id` SHOULD be an immutable reference, - // such as a cryptographic digest. - // - // Examples of valid `public_key_id`s: - // - // OpenPGP V4 public key fingerprint: - // * "openpgp4fpr:74FAF3B861BDA0870C7B6DEF607E48D2A663AEEA" - // See https://www.iana.org/assignments/uri-schemes/prov/openpgp4fpr for more - // details on this scheme. 
- // - // RFC6920 digest-named SubjectPublicKeyInfo (digest of the DER - // serialization): - // * "ni:///sha-256;cD9o9Cq6LG3jD0iKXqEi_vdjJGecm_iXkbqVoScViaU" - // * "nih:///sha-256;703f68f42aba2c6de30f488a5ea122fef76324679c9bf89791ba95a1271589a5" - string public_key_id = 2; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/common_pb2.py b/grafeas/grafeas/grafeas_v1/proto/common_pb2.py deleted file mode 100644 index 78a377e57a9c..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/common_pb2.py +++ /dev/null @@ -1,286 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas_v1/proto/common.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/common.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1dgrafeas_v1/proto/common.proto\x12\ngrafeas.v1"(\n\nRelatedUrl\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\t"5\n\tSignature\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x15\n\rpublic_key_id\x18\x02 \x01(\t*\x98\x01\n\x08NoteKind\x12\x19\n\x15NOTE_KIND_UNSPECIFIED\x10\x00\x12\x11\n\rVULNERABILITY\x10\x01\x12\t\n\x05\x42UILD\x10\x02\x12\t\n\x05IMAGE\x10\x03\x12\x0b\n\x07PACKAGE\x10\x04\x12\x0e\n\nDEPLOYMENT\x10\x05\x12\r\n\tDISCOVERY\x10\x06\x12\x0f\n\x0b\x41TTESTATION\x10\x07\x12\x0b\n\x07UPGRADE\x10\x08\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), -) - -_NOTEKIND = _descriptor.EnumDescriptor( - name="NoteKind", - full_name="grafeas.v1.NoteKind", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="NOTE_KIND_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="VULNERABILITY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BUILD", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMAGE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PACKAGE", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DEPLOYMENT", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DISCOVERY", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ATTESTATION", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UPGRADE", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=143, - serialized_end=295, -) -_sym_db.RegisterEnumDescriptor(_NOTEKIND) - -NoteKind = enum_type_wrapper.EnumTypeWrapper(_NOTEKIND) -NOTE_KIND_UNSPECIFIED = 0 -VULNERABILITY = 1 -BUILD = 2 -IMAGE = 3 -PACKAGE = 4 -DEPLOYMENT = 5 -DISCOVERY = 6 
-ATTESTATION = 7 -UPGRADE = 8 - - -_RELATEDURL = _descriptor.Descriptor( - name="RelatedUrl", - full_name="grafeas.v1.RelatedUrl", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="grafeas.v1.RelatedUrl.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label", - full_name="grafeas.v1.RelatedUrl.label", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=45, - serialized_end=85, -) - - -_SIGNATURE = _descriptor.Descriptor( - name="Signature", - full_name="grafeas.v1.Signature", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="signature", - full_name="grafeas.v1.Signature.signature", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="public_key_id", - full_name="grafeas.v1.Signature.public_key_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=87, - serialized_end=140, -) - -DESCRIPTOR.message_types_by_name["RelatedUrl"] = _RELATEDURL -DESCRIPTOR.message_types_by_name["Signature"] = _SIGNATURE -DESCRIPTOR.enum_types_by_name["NoteKind"] = _NOTEKIND -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -RelatedUrl = _reflection.GeneratedProtocolMessageType( - "RelatedUrl", - (_message.Message,), - dict( - DESCRIPTOR=_RELATEDURL, - __module__="grafeas_v1.proto.common_pb2", - __doc__="""Metadata for any related URL information. - - - Attributes: - url: - Specific URL associated with the resource. - label: - Label to describe usage of the URL. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.RelatedUrl) - ), -) -_sym_db.RegisterMessage(RelatedUrl) - -Signature = _reflection.GeneratedProtocolMessageType( - "Signature", - (_message.Message,), - dict( - DESCRIPTOR=_SIGNATURE, - __module__="grafeas_v1.proto.common_pb2", - __doc__="""Verifiers (e.g. Kritis implementations) MUST verify - signatures with respect to the trust anchors defined in policy (e.g. a - Kritis policy). Typically this means that the verifier has been - configured with a map from ``public_key_id`` to public key material (and - any required parameters, e.g. signing algorithm). 
- - In particular, verification implementations MUST NOT treat the signature - ``public_key_id`` as anything more than a key lookup hint. The - ``public_key_id`` DOES NOT validate or authenticate a public key; it - only provides a mechanism for quickly selecting a public key ALREADY - CONFIGURED on the verifier through a trusted channel. Verification - implementations MUST reject signatures in any of the following - circumstances: \* The ``public_key_id`` is not recognized by the - verifier. \* The public key that ``public_key_id`` refers to does not - verify the signature with respect to the payload. - - The ``signature`` contents SHOULD NOT be "attached" (where the payload - is included with the serialized ``signature`` bytes). Verifiers MUST - ignore any "attached" payload and only verify signatures with respect to - explicitly provided payload (e.g. a ``payload`` field on the proto - message that holds this Signature, or the canonical serialization of the - proto message that holds this signature). - - - Attributes: - signature: - The content of the signature, an opaque bytestring. The - payload that this signature verifies MUST be unambiguously - provided with the Signature during verification. A wrapper - message might provide the payload explicitly. Alternatively, a - message might have a canonical serialization that can always - be unambiguously computed to derive the payload. - public_key_id: - The identifier for the public key that verifies this - signature. \* The ``public_key_id`` is required. \* The - ``public_key_id`` MUST be an RFC3986 conformant URI. \* When - possible, the ``public_key_id`` SHOULD be an immutable - reference, such as a cryptographic digest. Examples of valid - ``public_key_id``\ s: OpenPGP V4 public key fingerprint: \* - "openpgp4fpr:74FAF3B861BDA0870C7B6DEF607E48D2A663AEEA" See - https://www.iana.org/assignments/uri-schemes/prov/openpgp4fpr - for more details on this scheme. RFC6920 digest-named - SubjectPublicKeyInfo (digest of the DER serialization): \* - "ni:///sha-256;cD9o9Cq6LG3jD0iKXqEi\_vdjJGecm\_iXkbqVoScViaU" - \* "nih:///sha-256;703f68f42aba2c6de30f488a5ea122fef76324679c9 - bf89791ba95a1271589a5" - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Signature) - ), -) -_sym_db.RegisterMessage(Signature) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/cvss.proto b/grafeas/grafeas/grafeas_v1/proto/cvss.proto deleted file mode 100644 index b41cd633d2a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/cvss.proto +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
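The verification contract spelled out for ``Signature`` above, together with the ``AttestationOccurrence`` rule that one valid signature suffices, translates directly into code. A minimal sketch using Ed25519 keys from the ``cryptography`` package; the trust-anchor map and the demo key are placeholders standing in for key material that a real verifier would configure through a trusted channel:

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric import ed25519

# Hypothetical trust anchors: public_key_id -> ALREADY CONFIGURED key material.
_demo_key = ed25519.Ed25519PrivateKey.generate()
TRUSTED_KEYS = {"nih:///sha-256;demo": _demo_key.public_key()}


def verify_signature(public_key_id, signature, payload):
    key = TRUSTED_KEYS.get(public_key_id)
    if key is None:
        return False  # unrecognized key id: MUST reject
    try:
        # Detached verification: only the explicitly provided payload counts.
        key.verify(signature, payload)
        return True
    except InvalidSignature:
        return False


def attestation_verified(occurrence):
    # AttestationOccurrence semantics: at least one valid signature suffices.
    payload = occurrence.serialized_payload
    return any(
        verify_signature(sig.public_key_id, sig.signature, payload)
        for sig in occurrence.signatures
    )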
-// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// Common Vulnerability Scoring System version 3. -// For details, see https://www.first.org/cvss/specification-document -message CVSSv3 { - // The base score is a function of the base metric scores. - float base_score = 1; - - float exploitability_score = 2; - - float impact_score = 3; - - // Base Metrics - // Represents the intrinsic characteristics of a vulnerability that are - // constant over time and across user environments. - AttackVector attack_vector = 5; - AttackComplexity attack_complexity = 6; - PrivilegesRequired privileges_required = 7; - UserInteraction user_interaction = 8; - Scope scope = 9; - Impact confidentiality_impact = 10; - Impact integrity_impact = 11; - Impact availability_impact = 12; - - enum AttackVector { - ATTACK_VECTOR_UNSPECIFIED = 0; - ATTACK_VECTOR_NETWORK = 1; - ATTACK_VECTOR_ADJACENT = 2; - ATTACK_VECTOR_LOCAL = 3; - ATTACK_VECTOR_PHYSICAL = 4; - } - - enum AttackComplexity { - ATTACK_COMPLEXITY_UNSPECIFIED = 0; - ATTACK_COMPLEXITY_LOW = 1; - ATTACK_COMPLEXITY_HIGH = 2; - } - - enum PrivilegesRequired { - PRIVILEGES_REQUIRED_UNSPECIFIED = 0; - PRIVILEGES_REQUIRED_NONE = 1; - PRIVILEGES_REQUIRED_LOW = 2; - PRIVILEGES_REQUIRED_HIGH = 3; - } - - enum UserInteraction { - USER_INTERACTION_UNSPECIFIED = 0; - USER_INTERACTION_NONE = 1; - USER_INTERACTION_REQUIRED = 2; - } - - enum Scope { - SCOPE_UNSPECIFIED = 0; - SCOPE_UNCHANGED = 1; - SCOPE_CHANGED = 2; - } - - enum Impact { - IMPACT_UNSPECIFIED = 0; - IMPACT_HIGH = 1; - IMPACT_LOW = 2; - IMPACT_NONE = 3; - } -} diff --git a/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py b/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py deleted file mode 100644 index 5bdfe1f19ba5..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py +++ /dev/null @@ -1,521 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
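The comment on ``base_score`` above says only that it is a function of the base metric scores. The formula itself lives in the FIRST CVSS v3 specification rather than in Grafeas; for reference, a sketch of that computation, with metric weights taken from the spec and ``roundup`` approximated as ceiling to one decimal:

import math

# Metric weights from the FIRST CVSS v3 specification (not part of Grafeas).
AV = {"NETWORK": 0.85, "ADJACENT": 0.62, "LOCAL": 0.55, "PHYSICAL": 0.20}
AC = {"LOW": 0.77, "HIGH": 0.44}
PR = {  # privileges-required weight depends on whether scope changes
    "UNCHANGED": {"NONE": 0.85, "LOW": 0.62, "HIGH": 0.27},
    "CHANGED": {"NONE": 0.85, "LOW": 0.68, "HIGH": 0.50},
}
UI = {"NONE": 0.85, "REQUIRED": 0.62}
CIA = {"HIGH": 0.56, "LOW": 0.22, "NONE": 0.0}


def roundup(x):
    # Spec-defined rounding: smallest one-decimal value >= x.
    return math.ceil(x * 10) / 10


def base_score(av, ac, pr, ui, scope, conf, integ, avail):
    iss = 1 - (1 - CIA[conf]) * (1 - CIA[integ]) * (1 - CIA[avail])
    if scope == "UNCHANGED":
        impact = 6.42 * iss
    else:
        impact = 7.52 * (iss - 0.029) - 3.25 * (iss - 0.02) ** 15
    exploitability = 8.22 * AV[av] * AC[ac] * PR[scope][pr] * UI[ui]
    if impact <= 0:
        return 0.0
    if scope == "UNCHANGED":
        return roundup(min(impact + exploitability, 10))
    return roundup(min(1.08 * (impact + exploitability), 10))


# AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H scores 9.8 (critical).
print(base_score("NETWORK", "LOW", "NONE", "NONE", "UNCHANGED", "HIGH", "HIGH", "HIGH"))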
-# source: grafeas_v1/proto/cvss.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/cvss.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1bgrafeas_v1/proto/cvss.proto\x12\ngrafeas.v1"\xc5\t\n\x06\x43VSSv3\x12\x12\n\nbase_score\x18\x01 \x01(\x02\x12\x1c\n\x14\x65xploitability_score\x18\x02 \x01(\x02\x12\x14\n\x0cimpact_score\x18\x03 \x01(\x02\x12\x36\n\rattack_vector\x18\x05 \x01(\x0e\x32\x1f.grafeas.v1.CVSSv3.AttackVector\x12>\n\x11\x61ttack_complexity\x18\x06 \x01(\x0e\x32#.grafeas.v1.CVSSv3.AttackComplexity\x12\x42\n\x13privileges_required\x18\x07 \x01(\x0e\x32%.grafeas.v1.CVSSv3.PrivilegesRequired\x12<\n\x10user_interaction\x18\x08 \x01(\x0e\x32".grafeas.v1.CVSSv3.UserInteraction\x12\'\n\x05scope\x18\t \x01(\x0e\x32\x18.grafeas.v1.CVSSv3.Scope\x12\x39\n\x16\x63onfidentiality_impact\x18\n \x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact\x12\x33\n\x10integrity_impact\x18\x0b \x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact\x12\x36\n\x13\x61vailability_impact\x18\x0c \x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact"\x99\x01\n\x0c\x41ttackVector\x12\x1d\n\x19\x41TTACK_VECTOR_UNSPECIFIED\x10\x00\x12\x19\n\x15\x41TTACK_VECTOR_NETWORK\x10\x01\x12\x1a\n\x16\x41TTACK_VECTOR_ADJACENT\x10\x02\x12\x17\n\x13\x41TTACK_VECTOR_LOCAL\x10\x03\x12\x1a\n\x16\x41TTACK_VECTOR_PHYSICAL\x10\x04"l\n\x10\x41ttackComplexity\x12!\n\x1d\x41TTACK_COMPLEXITY_UNSPECIFIED\x10\x00\x12\x19\n\x15\x41TTACK_COMPLEXITY_LOW\x10\x01\x12\x1a\n\x16\x41TTACK_COMPLEXITY_HIGH\x10\x02"\x92\x01\n\x12PrivilegesRequired\x12#\n\x1fPRIVILEGES_REQUIRED_UNSPECIFIED\x10\x00\x12\x1c\n\x18PRIVILEGES_REQUIRED_NONE\x10\x01\x12\x1b\n\x17PRIVILEGES_REQUIRED_LOW\x10\x02\x12\x1c\n\x18PRIVILEGES_REQUIRED_HIGH\x10\x03"m\n\x0fUserInteraction\x12 \n\x1cUSER_INTERACTION_UNSPECIFIED\x10\x00\x12\x19\n\x15USER_INTERACTION_NONE\x10\x01\x12\x1d\n\x19USER_INTERACTION_REQUIRED\x10\x02"F\n\x05Scope\x12\x15\n\x11SCOPE_UNSPECIFIED\x10\x00\x12\x13\n\x0fSCOPE_UNCHANGED\x10\x01\x12\x11\n\rSCOPE_CHANGED\x10\x02"R\n\x06Impact\x12\x16\n\x12IMPACT_UNSPECIFIED\x10\x00\x12\x0f\n\x0bIMPACT_HIGH\x10\x01\x12\x0e\n\nIMPACT_LOW\x10\x02\x12\x0f\n\x0bIMPACT_NONE\x10\x03\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), -) - - -_CVSSV3_ATTACKVECTOR = _descriptor.EnumDescriptor( - name="AttackVector", - full_name="grafeas.v1.CVSSv3.AttackVector", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_NETWORK", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_ADJACENT", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_LOCAL", - index=3, - number=3, - serialized_options=None, - type=None, - ), - 
_descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_PHYSICAL", - index=4, - number=4, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=586, - serialized_end=739, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_ATTACKVECTOR) - -_CVSSV3_ATTACKCOMPLEXITY = _descriptor.EnumDescriptor( - name="AttackComplexity", - full_name="grafeas.v1.CVSSv3.AttackComplexity", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_LOW", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_HIGH", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=741, - serialized_end=849, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_ATTACKCOMPLEXITY) - -_CVSSV3_PRIVILEGESREQUIRED = _descriptor.EnumDescriptor( - name="PrivilegesRequired", - full_name="grafeas.v1.CVSSv3.PrivilegesRequired", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_NONE", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_LOW", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_HIGH", - index=3, - number=3, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=852, - serialized_end=998, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_PRIVILEGESREQUIRED) - -_CVSSV3_USERINTERACTION = _descriptor.EnumDescriptor( - name="UserInteraction", - full_name="grafeas.v1.CVSSv3.UserInteraction", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_NONE", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_REQUIRED", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1000, - serialized_end=1109, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_USERINTERACTION) - -_CVSSV3_SCOPE = _descriptor.EnumDescriptor( - name="Scope", - full_name="grafeas.v1.CVSSv3.Scope", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="SCOPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SCOPE_UNCHANGED", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SCOPE_CHANGED", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1111, - serialized_end=1181, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_SCOPE) - -_CVSSV3_IMPACT = _descriptor.EnumDescriptor( - name="Impact", - full_name="grafeas.v1.CVSSv3.Impact", - filename=None, - file=DESCRIPTOR, - values=[ 
- _descriptor.EnumValueDescriptor( - name="IMPACT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_HIGH", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_LOW", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_NONE", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1183, - serialized_end=1265, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_IMPACT) - - -_CVSSV3 = _descriptor.Descriptor( - name="CVSSv3", - full_name="grafeas.v1.CVSSv3", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="base_score", - full_name="grafeas.v1.CVSSv3.base_score", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exploitability_score", - full_name="grafeas.v1.CVSSv3.exploitability_score", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="impact_score", - full_name="grafeas.v1.CVSSv3.impact_score", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="attack_vector", - full_name="grafeas.v1.CVSSv3.attack_vector", - index=3, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="attack_complexity", - full_name="grafeas.v1.CVSSv3.attack_complexity", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="privileges_required", - full_name="grafeas.v1.CVSSv3.privileges_required", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_interaction", - full_name="grafeas.v1.CVSSv3.user_interaction", - index=6, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="scope", - full_name="grafeas.v1.CVSSv3.scope", - index=7, - number=9, - type=14, - cpp_type=8, - 
label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="confidentiality_impact", - full_name="grafeas.v1.CVSSv3.confidentiality_impact", - index=8, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="integrity_impact", - full_name="grafeas.v1.CVSSv3.integrity_impact", - index=9, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="availability_impact", - full_name="grafeas.v1.CVSSv3.availability_impact", - index=10, - number=12, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[ - _CVSSV3_ATTACKVECTOR, - _CVSSV3_ATTACKCOMPLEXITY, - _CVSSV3_PRIVILEGESREQUIRED, - _CVSSV3_USERINTERACTION, - _CVSSV3_SCOPE, - _CVSSV3_IMPACT, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=44, - serialized_end=1265, -) - -_CVSSV3.fields_by_name["attack_vector"].enum_type = _CVSSV3_ATTACKVECTOR -_CVSSV3.fields_by_name["attack_complexity"].enum_type = _CVSSV3_ATTACKCOMPLEXITY -_CVSSV3.fields_by_name["privileges_required"].enum_type = _CVSSV3_PRIVILEGESREQUIRED -_CVSSV3.fields_by_name["user_interaction"].enum_type = _CVSSV3_USERINTERACTION -_CVSSV3.fields_by_name["scope"].enum_type = _CVSSV3_SCOPE -_CVSSV3.fields_by_name["confidentiality_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3.fields_by_name["integrity_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3.fields_by_name["availability_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3_ATTACKVECTOR.containing_type = _CVSSV3 -_CVSSV3_ATTACKCOMPLEXITY.containing_type = _CVSSV3 -_CVSSV3_PRIVILEGESREQUIRED.containing_type = _CVSSV3 -_CVSSV3_USERINTERACTION.containing_type = _CVSSV3 -_CVSSV3_SCOPE.containing_type = _CVSSV3 -_CVSSV3_IMPACT.containing_type = _CVSSV3 -DESCRIPTOR.message_types_by_name["CVSSv3"] = _CVSSV3 -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CVSSv3 = _reflection.GeneratedProtocolMessageType( - "CVSSv3", - (_message.Message,), - dict( - DESCRIPTOR=_CVSSV3, - __module__="grafeas_v1.proto.cvss_pb2", - __doc__="""Common Vulnerability Scoring System version 3. For - details, see https://www.first.org/cvss/specification-document - - - Attributes: - base_score: - The base score is a function of the base metric scores. - attack_vector: - Base Metrics Represents the intrinsic characteristics of a - vulnerability that are constant over time and across user - environments. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.CVSSv3) - ), -) -_sym_db.RegisterMessage(CVSSv3) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/deployment.proto b/grafeas/grafeas/grafeas_v1/proto/deployment.proto deleted file mode 100644 index 5204004fd646..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/deployment.proto +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// An artifact that can be deployed in some runtime. -message DeploymentNote { - // Required. Resource URI for the artifact being deployed. - repeated string resource_uri = 1; -} - -// The period during which some deployable was active in a runtime. -message DeploymentOccurrence { - // Identity of the user that triggered this deployment. - string user_email = 1; - - // Required. Beginning of the lifetime of this deployment. - google.protobuf.Timestamp deploy_time = 2; - - // End of the lifetime of this deployment. - google.protobuf.Timestamp undeploy_time = 3; - - // Configuration used to create this deployment. - string config = 4; - - // Address of the runtime element hosting this deployment. - string address = 5; - - // Output only. Resource URI for the artifact being deployed taken from - // the deployable field with the same name. - repeated string resource_uri = 6; - - // Types of platforms. - enum Platform { - // Unknown. - PLATFORM_UNSPECIFIED = 0; - // Google Container Engine. - GKE = 1; - // Google App Engine: Flexible Environment. - FLEX = 2; - // Custom user-defined platform. - CUSTOM = 3; - } - // Platform hosting this deployment. - Platform platform = 7; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py deleted file mode 100644 index 556749ae2e2c..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py +++ /dev/null @@ -1,317 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
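To make the ``DeploymentOccurrence`` message defined above concrete, a hedged construction example against the generated module; the import path follows this tree's layout before removal, and every field value is illustrative:

from google.protobuf import timestamp_pb2

from grafeas.grafeas_v1.proto import deployment_pb2

deploy_time = timestamp_pb2.Timestamp()
deploy_time.GetCurrentTime()  # well-known-type helper: "now" as a Timestamp

occurrence = deployment_pb2.DeploymentOccurrence(
    user_email="deployer@example.com",  # illustrative values only
    deploy_time=deploy_time,
    config="replicas: 3",
    address="us-east1/prod-cluster",
    platform=deployment_pb2.DeploymentOccurrence.GKE,
)
# undeploy_time stays unset while the deployment is live.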
-# source: grafeas_v1/proto/deployment.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/deployment.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n!grafeas_v1/proto/deployment.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"&\n\x0e\x44\x65ploymentNote\x12\x14\n\x0cresource_uri\x18\x01 \x03(\t"\xc7\x02\n\x14\x44\x65ploymentOccurrence\x12\x12\n\nuser_email\x18\x01 \x01(\t\x12/\n\x0b\x64\x65ploy_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rundeploy_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x63onfig\x18\x04 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x05 \x01(\t\x12\x14\n\x0cresource_uri\x18\x06 \x03(\t\x12;\n\x08platform\x18\x07 \x01(\x0e\x32).grafeas.v1.DeploymentOccurrence.Platform"C\n\x08Platform\x12\x18\n\x14PLATFORM_UNSPECIFIED\x10\x00\x12\x07\n\x03GKE\x10\x01\x12\x08\n\x04\x46LEX\x10\x02\x12\n\n\x06\x43USTOM\x10\x03\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], -) - - -_DEPLOYMENTOCCURRENCE_PLATFORM = _descriptor.EnumDescriptor( - name="Platform", - full_name="grafeas.v1.DeploymentOccurrence.Platform", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="PLATFORM_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="GKE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FLEX", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CUSTOM", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=383, - serialized_end=450, -) -_sym_db.RegisterEnumDescriptor(_DEPLOYMENTOCCURRENCE_PLATFORM) - - -_DEPLOYMENTNOTE = _descriptor.Descriptor( - name="DeploymentNote", - full_name="grafeas.v1.DeploymentNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_uri", - full_name="grafeas.v1.DeploymentNote.resource_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=82, - serialized_end=120, -) - - -_DEPLOYMENTOCCURRENCE = _descriptor.Descriptor( - name="DeploymentOccurrence", - full_name="grafeas.v1.DeploymentOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="user_email", - full_name="grafeas.v1.DeploymentOccurrence.user_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deploy_time", - full_name="grafeas.v1.DeploymentOccurrence.deploy_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="undeploy_time", - full_name="grafeas.v1.DeploymentOccurrence.undeploy_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="config", - full_name="grafeas.v1.DeploymentOccurrence.config", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="address", - full_name="grafeas.v1.DeploymentOccurrence.address", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_uri", - full_name="grafeas.v1.DeploymentOccurrence.resource_uri", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="platform", - full_name="grafeas.v1.DeploymentOccurrence.platform", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DEPLOYMENTOCCURRENCE_PLATFORM,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=123, - serialized_end=450, -) - -_DEPLOYMENTOCCURRENCE.fields_by_name[ - "deploy_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DEPLOYMENTOCCURRENCE.fields_by_name[ - "undeploy_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DEPLOYMENTOCCURRENCE.fields_by_name[ - "platform" -].enum_type = _DEPLOYMENTOCCURRENCE_PLATFORM -_DEPLOYMENTOCCURRENCE_PLATFORM.containing_type = _DEPLOYMENTOCCURRENCE -DESCRIPTOR.message_types_by_name["DeploymentNote"] = _DEPLOYMENTNOTE -DESCRIPTOR.message_types_by_name["DeploymentOccurrence"] = _DEPLOYMENTOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - 
-DeploymentNote = _reflection.GeneratedProtocolMessageType( - "DeploymentNote", - (_message.Message,), - dict( - DESCRIPTOR=_DEPLOYMENTNOTE, - __module__="grafeas_v1.proto.deployment_pb2", - __doc__="""An artifact that can be deployed in some runtime. - - - Attributes: - resource_uri: - Required. Resource URI for the artifact being deployed. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DeploymentNote) - ), -) -_sym_db.RegisterMessage(DeploymentNote) - -DeploymentOccurrence = _reflection.GeneratedProtocolMessageType( - "DeploymentOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_DEPLOYMENTOCCURRENCE, - __module__="grafeas_v1.proto.deployment_pb2", - __doc__="""The period during which some deployable was active in a - runtime. - - - Attributes: - user_email: - Identity of the user that triggered this deployment. - deploy_time: - Required. Beginning of the lifetime of this deployment. - undeploy_time: - End of the lifetime of this deployment. - config: - Configuration used to create this deployment. - address: - Address of the runtime element hosting this deployment. - resource_uri: - Output only. Resource URI for the artifact being deployed - taken from the deployable field with the same name. - platform: - Platform hosting this deployment. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DeploymentOccurrence) - ), -) -_sym_db.RegisterMessage(DeploymentOccurrence) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/discovery.proto b/grafeas/grafeas/grafeas_v1/proto/discovery.proto deleted file mode 100644 index e07992557de2..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/discovery.proto +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "grafeas/v1/common.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// A note that indicates a type of analysis a provider would perform. This note -// exists in a provider's project. A `Discovery` occurrence is created in a -// consumer's project at the start of analysis. -message DiscoveryNote { - // Required. Immutable. The kind of analysis that is handled by this - // discovery. - grafeas.v1.NoteKind analysis_kind = 1; -} - -// Provides information about the analysis status of a discovered resource. 
-message DiscoveryOccurrence { - // Whether the resource is continuously analyzed. - enum ContinuousAnalysis { - // Unknown. - CONTINUOUS_ANALYSIS_UNSPECIFIED = 0; - // The resource is continuously analyzed. - ACTIVE = 1; - // The resource is ignored for continuous analysis. - INACTIVE = 2; - } - - // Whether the resource is continuously analyzed. - ContinuousAnalysis continuous_analysis = 1; - - // Analysis status for a resource. Currently for initial analysis only (not - // updated in continuous analysis). - enum AnalysisStatus { - // Unknown. - ANALYSIS_STATUS_UNSPECIFIED = 0; - // Resource is known but no action has been taken yet. - PENDING = 1; - // Resource is being analyzed. - SCANNING = 2; - // Analysis has finished successfully. - FINISHED_SUCCESS = 3; - // Analysis has finished unsuccessfully; the analysis itself is in a bad - // state. - FINISHED_FAILED = 4; - // The resource is known not to be supported. - FINISHED_UNSUPPORTED = 5; - } - - // The status of discovery for the resource. - AnalysisStatus analysis_status = 2; - - // When an error is encountered, this will contain a LocalizedMessage under - // details to show to the user. The LocalizedMessage is output only and - // populated by the API. - google.rpc.Status analysis_status_error = 3; - - // The CPE of the resource being scanned. - string cpe = 4; - - // The last time this resource was scanned. - google.protobuf.Timestamp last_scan_time = 5; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py deleted file mode 100644 index 65976d7924bc..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py +++ /dev/null @@ -1,342 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT!
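The ``AnalysisStatus`` enum above implies a simple lifecycle: ``PENDING`` and ``SCANNING`` are transient, while the three ``FINISHED_*`` values are terminal. A small helper sketch against the generated module (import path assumed from this tree's pre-removal layout):

from grafeas.grafeas_v1.proto import discovery_pb2

_TERMINAL = frozenset([
    discovery_pb2.DiscoveryOccurrence.FINISHED_SUCCESS,
    discovery_pb2.DiscoveryOccurrence.FINISHED_FAILED,
    discovery_pb2.DiscoveryOccurrence.FINISHED_UNSUPPORTED,
])


def analysis_done(occ):
    """True once the initial analysis has reached a terminal state."""
    return occ.analysis_status in _TERMINAL


def analysis_error(occ):
    """Return the google.rpc.Status detail for a failed analysis, if any."""
    if occ.analysis_status == discovery_pb2.DiscoveryOccurrence.FINISHED_FAILED:
        return occ.analysis_status_error
    return None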
-# source: grafeas_v1/proto/discovery.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from grafeas.grafeas_v1.proto import common_pb2 as grafeas__v1_dot_proto_dot_common__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/discovery.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n grafeas_v1/proto/discovery.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1dgrafeas_v1/proto/common.proto"<\n\rDiscoveryNote\x12+\n\ranalysis_kind\x18\x01 \x01(\x0e\x32\x14.grafeas.v1.NoteKind"\x8c\x04\n\x13\x44iscoveryOccurrence\x12O\n\x13\x63ontinuous_analysis\x18\x01 \x01(\x0e\x32\x32.grafeas.v1.DiscoveryOccurrence.ContinuousAnalysis\x12G\n\x0f\x61nalysis_status\x18\x02 \x01(\x0e\x32..grafeas.v1.DiscoveryOccurrence.AnalysisStatus\x12\x31\n\x15\x61nalysis_status_error\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x0b\n\x03\x63pe\x18\x04 \x01(\t\x12\x32\n\x0elast_scan_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"S\n\x12\x43ontinuousAnalysis\x12#\n\x1f\x43ONTINUOUS_ANALYSIS_UNSPECIFIED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02"\x91\x01\n\x0e\x41nalysisStatus\x12\x1f\n\x1b\x41NALYSIS_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0c\n\x08SCANNING\x10\x02\x12\x14\n\x10\x46INISHED_SUCCESS\x10\x03\x12\x13\n\x0f\x46INISHED_FAILED\x10\x04\x12\x18\n\x14\x46INISHED_UNSUPPORTED\x10\x05\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR, - ], -) - - -_DISCOVERYOCCURRENCE_CONTINUOUSANALYSIS = _descriptor.EnumDescriptor( - name="ContinuousAnalysis", - full_name="grafeas.v1.DiscoveryOccurrence.ContinuousAnalysis", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="CONTINUOUS_ANALYSIS_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INACTIVE", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=493, - serialized_end=576, -) -_sym_db.RegisterEnumDescriptor(_DISCOVERYOCCURRENCE_CONTINUOUSANALYSIS) - -_DISCOVERYOCCURRENCE_ANALYSISSTATUS = _descriptor.EnumDescriptor( - name="AnalysisStatus", - full_name="grafeas.v1.DiscoveryOccurrence.AnalysisStatus", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ANALYSIS_STATUS_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, 
serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SCANNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FINISHED_SUCCESS", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FINISHED_FAILED", - index=4, - number=4, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FINISHED_UNSUPPORTED", - index=5, - number=5, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=579, - serialized_end=724, -) -_sym_db.RegisterEnumDescriptor(_DISCOVERYOCCURRENCE_ANALYSISSTATUS) - - -_DISCOVERYNOTE = _descriptor.Descriptor( - name="DiscoveryNote", - full_name="grafeas.v1.DiscoveryNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="analysis_kind", - full_name="grafeas.v1.DiscoveryNote.analysis_kind", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=137, - serialized_end=197, -) - - -_DISCOVERYOCCURRENCE = _descriptor.Descriptor( - name="DiscoveryOccurrence", - full_name="grafeas.v1.DiscoveryOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="continuous_analysis", - full_name="grafeas.v1.DiscoveryOccurrence.continuous_analysis", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="analysis_status", - full_name="grafeas.v1.DiscoveryOccurrence.analysis_status", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="analysis_status_error", - full_name="grafeas.v1.DiscoveryOccurrence.analysis_status_error", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cpe", - full_name="grafeas.v1.DiscoveryOccurrence.cpe", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last_scan_time", - full_name="grafeas.v1.DiscoveryOccurrence.last_scan_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[ - _DISCOVERYOCCURRENCE_CONTINUOUSANALYSIS, - _DISCOVERYOCCURRENCE_ANALYSISSTATUS, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=200, - serialized_end=724, -) - -_DISCOVERYNOTE.fields_by_name[ - "analysis_kind" -].enum_type = grafeas__v1_dot_proto_dot_common__pb2._NOTEKIND -_DISCOVERYOCCURRENCE.fields_by_name[ - "continuous_analysis" -].enum_type = _DISCOVERYOCCURRENCE_CONTINUOUSANALYSIS -_DISCOVERYOCCURRENCE.fields_by_name[ - "analysis_status" -].enum_type = _DISCOVERYOCCURRENCE_ANALYSISSTATUS -_DISCOVERYOCCURRENCE.fields_by_name[ - "analysis_status_error" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_DISCOVERYOCCURRENCE.fields_by_name[ - "last_scan_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DISCOVERYOCCURRENCE_CONTINUOUSANALYSIS.containing_type = _DISCOVERYOCCURRENCE -_DISCOVERYOCCURRENCE_ANALYSISSTATUS.containing_type = _DISCOVERYOCCURRENCE -DESCRIPTOR.message_types_by_name["DiscoveryNote"] = _DISCOVERYNOTE -DESCRIPTOR.message_types_by_name["DiscoveryOccurrence"] = _DISCOVERYOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DiscoveryNote = _reflection.GeneratedProtocolMessageType( - "DiscoveryNote", - (_message.Message,), - dict( - DESCRIPTOR=_DISCOVERYNOTE, - __module__="grafeas_v1.proto.discovery_pb2", - __doc__="""A note that indicates a type of analysis a provider would - perform. This note exists in a provider's project. A ``Discovery`` - occurrence is created in a consumer's project at the start of analysis. - - - Attributes: - analysis_kind: - Required. Immutable. The kind of analysis that is handled by - this discovery. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DiscoveryNote) - ), -) -_sym_db.RegisterMessage(DiscoveryNote) - -DiscoveryOccurrence = _reflection.GeneratedProtocolMessageType( - "DiscoveryOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_DISCOVERYOCCURRENCE, - __module__="grafeas_v1.proto.discovery_pb2", - __doc__="""Provides information about the analysis status of a - discovered resource. - - - Attributes: - continuous_analysis: - Whether the resource is continuously analyzed. - analysis_status: - The status of discovery for the resource. - analysis_status_error: - When an error is encountered this will contain a - LocalizedMessage under details to show to the user. The - LocalizedMessage is output only and populated by the API. - cpe: - The CPE of the resource being scanned. - last_scan_time: - The last time this resource was scanned. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DiscoveryOccurrence) - ), -) -_sym_db.RegisterMessage(DiscoveryOccurrence) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/grafeas.proto b/grafeas/grafeas/grafeas_v1/proto/grafeas.proto deleted file mode 100644 index 57b2fc23e5f9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/grafeas.proto +++ /dev/null @@ -1,531 +0,0 @@ -// Copyright 2019 The Grafeas Authors. 
All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "grafeas/v1/attestation.proto"; -import "grafeas/v1/build.proto"; -import "grafeas/v1/common.proto"; -import "grafeas/v1/deployment.proto"; -import "grafeas/v1/discovery.proto"; -import "grafeas/v1/image.proto"; -import "grafeas/v1/package.proto"; -import "grafeas/v1/upgrade.proto"; -import "grafeas/v1/vulnerability.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; -option (google.api.resource_definition) = { - type: "grafeas.io/Project" - pattern: "projects/{project}" -}; - -// [Grafeas](https://grafeas.io) API. -// -// Retrieves analysis results of Cloud components such as Docker container -// images. -// -// Analysis results are stored as a series of occurrences. An `Occurrence` -// contains information about a specific analysis instance on a resource. An -// occurrence refers to a `Note`. A note contains details describing the -// analysis and is generally stored in a separate project, called a `Provider`. -// Multiple occurrences can refer to the same note. -// -// For example, an SSL vulnerability could affect multiple images. In this case, -// there would be one note for the vulnerability and an occurrence for each -// image with the vulnerability referring to that note. -service Grafeas { - option (google.api.default_host) = "containeranalysis.googleapis.com"; - - // Gets the specified occurrence. - rpc GetOccurrence(GetOccurrenceRequest) returns (Occurrence) { - option (google.api.http) = { - get: "/v1/{name=projects/*/occurrences/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists occurrences for the specified project. - rpc ListOccurrences(ListOccurrencesRequest) - returns (ListOccurrencesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*}/occurrences" - }; - option (google.api.method_signature) = "parent,filter"; - } - - // Deletes the specified occurrence. For example, use this method to delete an - // occurrence when the occurrence is no longer applicable for the given - // resource. - rpc DeleteOccurrence(DeleteOccurrenceRequest) - returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/occurrences/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Creates a new occurrence. 
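The service comment above describes the core Grafeas data model: one provider-side Note, with one Occurrence per affected resource pointing back at it. As a quick illustration, here is a minimal sketch of that relationship built with the generated grafeas_pb2 module this patch deletes (assuming it is still importable; the project names and digests are hypothetical):

    from grafeas.grafeas_v1.proto import grafeas_pb2

    # One provider-side note for a single vulnerability...
    note = grafeas_pb2.Note(
        name="projects/provider-project/notes/CVE-2019-0001",
        short_description="An example SSL vulnerability note",
    )

    # ...and one occurrence per affected image, each referencing the note.
    occurrences = [
        grafeas_pb2.Occurrence(
            resource_uri="https://gcr.io/consumer-project/image@sha256:" + digest,
            note_name=note.name,
        )
        for digest in ("123abc", "456def")
    ]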
- rpc CreateOccurrence(CreateOccurrenceRequest) returns (Occurrence) { - option (google.api.http) = { - post: "/v1/{parent=projects/*}/occurrences" - body: "occurrence" - }; - option (google.api.method_signature) = "parent,occurrence"; - } - - // Creates new occurrences in batch. - rpc BatchCreateOccurrences(BatchCreateOccurrencesRequest) - returns (BatchCreateOccurrencesResponse) { - option (google.api.http) = { - post: "/v1/{parent=projects/*}/occurrences:batchCreate" - body: "*" - }; - option (google.api.method_signature) = "parent,occurrences"; - } - - // Updates the specified occurrence. - rpc UpdateOccurrence(UpdateOccurrenceRequest) returns (Occurrence) { - option (google.api.http) = { - patch: "/v1/{name=projects/*/occurrences/*}" - body: "occurrence" - }; - option (google.api.method_signature) = "name,occurrence,update_mask"; - } - - // Gets the note attached to the specified occurrence. Consumer projects can - // use this method to get a note that belongs to a provider project. - rpc GetOccurrenceNote(GetOccurrenceNoteRequest) returns (Note) { - option (google.api.http) = { - get: "/v1/{name=projects/*/occurrences/*}/notes" - }; - option (google.api.method_signature) = "name"; - } - - // Gets the specified note. - rpc GetNote(GetNoteRequest) returns (Note) { - option (google.api.http) = { - get: "/v1/{name=projects/*/notes/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists notes for the specified project. - rpc ListNotes(ListNotesRequest) returns (ListNotesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*}/notes" - }; - option (google.api.method_signature) = "parent,filter"; - } - - // Deletes the specified note. - rpc DeleteNote(DeleteNoteRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/notes/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Creates a new note. - rpc CreateNote(CreateNoteRequest) returns (Note) { - option (google.api.http) = { - post: "/v1/{parent=projects/*}/notes" - body: "note" - }; - option (google.api.method_signature) = "parent,note_id,note"; - } - - // Creates new notes in batch. - rpc BatchCreateNotes(BatchCreateNotesRequest) - returns (BatchCreateNotesResponse) { - option (google.api.http) = { - post: "/v1/{parent=projects/*}/notes:batchCreate" - body: "*" - }; - option (google.api.method_signature) = "parent,notes"; - } - - // Updates the specified note. - rpc UpdateNote(UpdateNoteRequest) returns (Note) { - option (google.api.http) = { - patch: "/v1/{name=projects/*/notes/*}" - body: "note" - }; - option (google.api.method_signature) = "name,note,update_mask"; - } - - // Lists occurrences referencing the specified note. Provider projects can use - // this method to get all occurrences across consumer projects referencing the - // specified note. - rpc ListNoteOccurrences(ListNoteOccurrencesRequest) - returns (ListNoteOccurrencesResponse) { - option (google.api.http) = { - get: "/v1/{name=projects/*/notes/*}/occurrences" - }; - option (google.api.method_signature) = "name,filter"; - } -} - -// An instance of an analysis type that has been found on a resource. -message Occurrence { - option (google.api.resource) = { - type: "grafeas.io/Occurrence" - pattern: "projects/{project}/occurrences/{occurrence}" - }; - - // Output only. The name of the occurrence in the form of - // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. - string name = 1; - - // Required. Immutable. 
A URI that represents the resource for which the - // occurrence applies. For example, - // `https://gcr.io/project/image@sha256:123abc` for a Docker image. - string resource_uri = 2; - - // Required. Immutable. The analysis note associated with this occurrence, in - // the form of `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. This field can be - // used as a filter in list requests. - string note_name = 3; - - // Output only. This explicitly denotes which of the occurrence details are - // specified. This field can be used as a filter in list requests. - grafeas.v1.NoteKind kind = 4; - - // A description of actions that can be taken to remedy the note. - string remediation = 5; - - // Output only. The time this occurrence was created. - google.protobuf.Timestamp create_time = 6; - - // Output only. The time this occurrence was last updated. - google.protobuf.Timestamp update_time = 7; - - // Required. Immutable. Describes the details of the note kind found on this - // resource. - oneof details { - // Describes a security vulnerability. - grafeas.v1.VulnerabilityOccurrence vulnerability = 8; - // Describes a verifiable build. - grafeas.v1.BuildOccurrence build = 9; - // Describes how this resource derives from the basis in the associated - // note. - grafeas.v1.ImageOccurrence image = 10; - // Describes the installation of a package on the linked resource. - grafeas.v1.PackageOccurrence package = 11; - // Describes the deployment of an artifact on a runtime. - grafeas.v1.DeploymentOccurrence deployment = 12; - // Describes when a resource was discovered. - grafeas.v1.DiscoveryOccurrence discovery = 13; - // Describes an attestation of an artifact. - grafeas.v1.AttestationOccurrence attestation = 14; - // Describes an available package upgrade on the linked resource. - grafeas.v1.UpgradeOccurrence upgrade = 15; - } -} - -// A type of analysis that can be done for a resource. -message Note { - option (google.api.resource) = { - type: "grafeas.io/Note" - pattern: "projects/{project}/notes/{note}" - }; - - // Output only. The name of the note in the form of - // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. - string name = 1; - - // A one sentence description of this note. - string short_description = 2; - - // A detailed description of this note. - string long_description = 3; - - // Output only. The type of analysis. This field can be used as a filter in - // list requests. - grafeas.v1.NoteKind kind = 4; - - // URLs associated with this note. - repeated grafeas.v1.RelatedUrl related_url = 5; - - // Time of expiration for this note. Empty if note does not expire. - google.protobuf.Timestamp expiration_time = 6; - - // Output only. The time this note was created. This field can be used as a - // filter in list requests. - google.protobuf.Timestamp create_time = 7; - - // Output only. The time this note was last updated. This field can be used as - // a filter in list requests. - google.protobuf.Timestamp update_time = 8; - - // Other notes related to this note. - repeated string related_note_names = 9; - - // Required. Immutable. The type of analysis this note represents. - oneof type { - // A note describing a package vulnerability. - grafeas.v1.VulnerabilityNote vulnerability = 10; - // A note describing build provenance for a verifiable build. - grafeas.v1.BuildNote build = 11; - // A note describing a base image. - grafeas.v1.ImageNote image = 12; - // A note describing a package hosted by various package managers. 
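The `details` oneof above means an Occurrence carries exactly one kind of payload: populating one member clears any previously set member. A short sketch of that behavior using standard protobuf message APIs (same importability assumption as above; the field values are made up, and `cpe` is a field of DiscoveryOccurrence shown earlier in this patch):

    from grafeas.grafeas_v1.proto import grafeas_pb2

    occ = grafeas_pb2.Occurrence(note_name="projects/p/notes/n")
    occ.discovery.cpe = "cpe:/o:debian:debian_linux:9"  # populates the `discovery` member
    assert occ.WhichOneof("details") == "discovery"

    occ.attestation.SetInParent()  # switching members clears `discovery`
    assert occ.WhichOneof("details") == "attestation"
    assert not occ.HasField("discovery")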
- grafeas.v1.PackageNote package = 13; - // A note describing something that can be deployed. - grafeas.v1.DeploymentNote deployment = 14; - // A note describing the initial analysis of a resource. - grafeas.v1.DiscoveryNote discovery = 15; - // A note describing an attestation role. - grafeas.v1.AttestationNote attestation = 16; - // A note describing available package upgrades. - grafeas.v1.UpgradeNote upgrade = 17; - } -} - -// Request to get an occurrence. -message GetOccurrenceRequest { - // The name of the occurrence in the form of - // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Occurrence" - ]; -} - -// Request to list occurrences. -message ListOccurrencesRequest { - // The name of the project to list occurrences for in the form of - // `projects/[PROJECT_ID]`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - - // The filter expression. - string filter = 2; - - // Number of occurrences to return in the list. Must be positive. Max allowed - // page size is 1000. If not specified, page size defaults to 20. - int32 page_size = 3; - - // Token to provide to skip to a particular spot in the list. - string page_token = 4; -} - -// Response for listing occurrences. -message ListOccurrencesResponse { - // The occurrences requested. - repeated Occurrence occurrences = 1; - // The next pagination token in the list response. It should be used as - // `page_token` for the following request. An empty value means no more - // results. - string next_page_token = 2; -} - -// Request to delete an occurrence. -message DeleteOccurrenceRequest { - // The name of the occurrence in the form of - // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Occurrence" - ]; -} - -// Request to create a new occurrence. -message CreateOccurrenceRequest { - // The name of the project in the form of `projects/[PROJECT_ID]`, under which - // the occurrence is to be created. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - // The occurrence to create. - Occurrence occurrence = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Request to update an occurrence. -message UpdateOccurrenceRequest { - // The name of the occurrence in the form of - // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Occurrence" - ]; - // The updated occurrence. - Occurrence occurrence = 2 [(google.api.field_behavior) = REQUIRED]; - // The fields to update. - google.protobuf.FieldMask update_mask = 3; -} - -// Request to get a note. -message GetNoteRequest { - // The name of the note in the form of - // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Note" - ]; -} - -// Request to get the note to which the specified occurrence is attached. -message GetOccurrenceNoteRequest { - // The name of the occurrence in the form of - // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. 
- string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Occurrence" - ]; -} - -// Request to list notes. -message ListNotesRequest { - // The name of the project to list notes for in the form of - // `projects/[PROJECT_ID]`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - - // The filter expression. - string filter = 2; - - // Number of notes to return in the list. Must be positive. Max allowed page - // size is 1000. If not specified, page size defaults to 20. - int32 page_size = 3; - - // Token to provide to skip to a particular spot in the list. - string page_token = 4; -} - -// Response for listing notes. -message ListNotesResponse { - // The notes requested. - repeated Note notes = 1; - // The next pagination token in the list response. It should be used as - // `page_token` for the following request. An empty value means no more - // results. - string next_page_token = 2; -} - -// Request to delete a note. -message DeleteNoteRequest { - // The name of the note in the form of - // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Note" - ]; -} - -// Request to create a new note. -message CreateNoteRequest { - // The name of the project in the form of `projects/[PROJECT_ID]`, under which - // the note is to be created. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - // The ID to use for this note. - string note_id = 2 [(google.api.field_behavior) = REQUIRED]; - // The note to create. - Note note = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Request to update a note. -message UpdateNoteRequest { - // The name of the note in the form of - // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Note" - ]; - // The updated note. - Note note = 2 [(google.api.field_behavior) = REQUIRED]; - // The fields to update. - google.protobuf.FieldMask update_mask = 3; -} - -// Request to list occurrences for a note. -message ListNoteOccurrencesRequest { - // The name of the note to list occurrences for in the form of - // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Note" - ]; - // The filter expression. - string filter = 2; - // Number of occurrences to return in the list. - int32 page_size = 3; - // Token to provide to skip to a particular spot in the list. - string page_token = 4; -} - -// Response for listing occurrences for a note. -message ListNoteOccurrencesResponse { - // The occurrences attached to the specified note. - repeated Occurrence occurrences = 1; - // Token to provide to skip to a particular spot in the list. - string next_page_token = 2; -} - -// Request to create notes in batch. -message BatchCreateNotesRequest { - // The name of the project in the form of `projects/[PROJECT_ID]`, under which - // the notes are to be created. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - - // The notes to create. Max allowed length is 1000. - map notes = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Response for creating notes in batch. 
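The list-response comments above spell out the pagination contract: feed `next_page_token` back as `page_token` until it comes back empty, with `page_size` capped at 1000 (default 20). A hedged sketch of a client-side loop honoring that contract; `list_notes` here is a hypothetical callable wrapping the ListNotes RPC, not an API from this package:

    def iter_all_notes(list_notes, parent):
        """Yield every note under `parent`, following next_page_token."""
        page_token = ""
        while True:
            response = list_notes(parent=parent, page_size=1000, page_token=page_token)
            for note in response.notes:
                yield note
            page_token = response.next_page_token
            if not page_token:  # an empty token means no more results
                return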
-message BatchCreateNotesResponse { - // The notes that were created. - repeated Note notes = 1; -} - -// Request to create occurrences in batch. -message BatchCreateOccurrencesRequest { - // The name of the project in the form of `projects/[PROJECT_ID]`, under which - // the occurrences are to be created. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference).type = "grafeas.io/Project" - ]; - - // The occurrences to create. Max allowed length is 1000. - repeated Occurrence occurrences = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Response for creating occurrences in batch. -message BatchCreateOccurrencesResponse { - // The occurrences that were created. - repeated Occurrence occurrences = 1; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py deleted file mode 100644 index 96f3e2bd210b..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py +++ /dev/null @@ -1,2810 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas_v1/proto/grafeas.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from grafeas.grafeas_v1.proto import ( - attestation_pb2 as grafeas__v1_dot_proto_dot_attestation__pb2, -) -from grafeas.grafeas_v1.proto import build_pb2 as grafeas__v1_dot_proto_dot_build__pb2 -from grafeas.grafeas_v1.proto import common_pb2 as grafeas__v1_dot_proto_dot_common__pb2 -from grafeas.grafeas_v1.proto import ( - deployment_pb2 as grafeas__v1_dot_proto_dot_deployment__pb2, -) -from grafeas.grafeas_v1.proto import ( - discovery_pb2 as grafeas__v1_dot_proto_dot_discovery__pb2, -) -from grafeas.grafeas_v1.proto import image_pb2 as grafeas__v1_dot_proto_dot_image__pb2 -from grafeas.grafeas_v1.proto import ( - package_pb2 as grafeas__v1_dot_proto_dot_package__pb2, -) -from grafeas.grafeas_v1.proto import ( - upgrade_pb2 as grafeas__v1_dot_proto_dot_upgrade__pb2, -) -from grafeas.grafeas_v1.proto import ( - vulnerability_pb2 as grafeas__v1_dot_proto_dot_vulnerability__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/grafeas.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA\352A(\n\022grafeas.io/Project\022\022projects/{project}" - ), - serialized_pb=_b( - 
'\n\x1egrafeas_v1/proto/grafeas.proto\x12\ngrafeas.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a"grafeas_v1/proto/attestation.proto\x1a\x1cgrafeas_v1/proto/build.proto\x1a\x1dgrafeas_v1/proto/common.proto\x1a!grafeas_v1/proto/deployment.proto\x1a grafeas_v1/proto/discovery.proto\x1a\x1cgrafeas_v1/proto/image.proto\x1a\x1egrafeas_v1/proto/package.proto\x1a\x1egrafeas_v1/proto/upgrade.proto\x1a$grafeas_v1/proto/vulnerability.proto"\xd8\x05\n\nOccurrence\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0cresource_uri\x18\x02 \x01(\t\x12\x11\n\tnote_name\x18\x03 \x01(\t\x12"\n\x04kind\x18\x04 \x01(\x0e\x32\x14.grafeas.v1.NoteKind\x12\x13\n\x0bremediation\x18\x05 \x01(\t\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\rvulnerability\x18\x08 \x01(\x0b\x32#.grafeas.v1.VulnerabilityOccurrenceH\x00\x12,\n\x05\x62uild\x18\t \x01(\x0b\x32\x1b.grafeas.v1.BuildOccurrenceH\x00\x12,\n\x05image\x18\n \x01(\x0b\x32\x1b.grafeas.v1.ImageOccurrenceH\x00\x12\x30\n\x07package\x18\x0b \x01(\x0b\x32\x1d.grafeas.v1.PackageOccurrenceH\x00\x12\x36\n\ndeployment\x18\x0c \x01(\x0b\x32 .grafeas.v1.DeploymentOccurrenceH\x00\x12\x34\n\tdiscovery\x18\r \x01(\x0b\x32\x1f.grafeas.v1.DiscoveryOccurrenceH\x00\x12\x38\n\x0b\x61ttestation\x18\x0e \x01(\x0b\x32!.grafeas.v1.AttestationOccurrenceH\x00\x12\x30\n\x07upgrade\x18\x0f \x01(\x0b\x32\x1d.grafeas.v1.UpgradeOccurrenceH\x00:G\xea\x41\x44\n\x15grafeas.io/Occurrence\x12+projects/{project}/occurrences/{occurrence}B\t\n\x07\x64\x65tails"\x82\x06\n\x04Note\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11short_description\x18\x02 \x01(\t\x12\x18\n\x10long_description\x18\x03 \x01(\t\x12"\n\x04kind\x18\x04 \x01(\x0e\x32\x14.grafeas.v1.NoteKind\x12+\n\x0brelated_url\x18\x05 \x03(\x0b\x32\x16.grafeas.v1.RelatedUrl\x12\x33\n\x0f\x65xpiration_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12related_note_names\x18\t \x03(\t\x12\x36\n\rvulnerability\x18\n \x01(\x0b\x32\x1d.grafeas.v1.VulnerabilityNoteH\x00\x12&\n\x05\x62uild\x18\x0b \x01(\x0b\x32\x15.grafeas.v1.BuildNoteH\x00\x12&\n\x05image\x18\x0c \x01(\x0b\x32\x15.grafeas.v1.ImageNoteH\x00\x12*\n\x07package\x18\r \x01(\x0b\x32\x17.grafeas.v1.PackageNoteH\x00\x12\x30\n\ndeployment\x18\x0e \x01(\x0b\x32\x1a.grafeas.v1.DeploymentNoteH\x00\x12.\n\tdiscovery\x18\x0f \x01(\x0b\x32\x19.grafeas.v1.DiscoveryNoteH\x00\x12\x32\n\x0b\x61ttestation\x18\x10 \x01(\x0b\x32\x1b.grafeas.v1.AttestationNoteH\x00\x12*\n\x07upgrade\x18\x11 \x01(\x0b\x32\x17.grafeas.v1.UpgradeNoteH\x00:5\xea\x41\x32\n\x0fgrafeas.io/Note\x12\x1fprojects/{project}/notes/{note}B\x06\n\x04type"C\n\x14GetOccurrenceRequest\x12+\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15grafeas.io/Occurrence"{\n\x16ListOccurrencesRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"_\n\x17ListOccurrencesResponse\x12+\n\x0boccurrences\x18\x01 \x03(\x0b\x32\x16.grafeas.v1.Occurrence\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"F\n\x17\x44\x65leteOccurrenceRequest\x12+\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15grafeas.io/Occurrence"v\n\x17\x43reateOccurrenceRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12/\n\noccurrence\x18\x02 \x01(\x0b\x32\x16.grafeas.v1.OccurrenceB\x03\xe0\x41\x02"\xa8\x01\n\x17UpdateOccurrenceRequest\x12+\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15grafeas.io/Occurrence\x12/\n\noccurrence\x18\x02 \x01(\x0b\x32\x16.grafeas.v1.OccurrenceB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"7\n\x0eGetNoteRequest\x12%\n\x04name\x18\x01 \x01(\tB\x17\xe0\x41\x02\xfa\x41\x11\n\x0fgrafeas.io/Note"G\n\x18GetOccurrenceNoteRequest\x12+\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15grafeas.io/Occurrence"u\n\x10ListNotesRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"M\n\x11ListNotesResponse\x12\x1f\n\x05notes\x18\x01 \x03(\x0b\x32\x10.grafeas.v1.Note\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t":\n\x11\x44\x65leteNoteRequest\x12%\n\x04name\x18\x01 \x01(\tB\x17\xe0\x41\x02\xfa\x41\x11\n\x0fgrafeas.io/Note"z\n\x11\x43reateNoteRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12\x14\n\x07note_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12#\n\x04note\x18\x03 \x01(\x0b\x32\x10.grafeas.v1.NoteB\x03\xe0\x41\x02"\x90\x01\n\x11UpdateNoteRequest\x12%\n\x04name\x18\x01 \x01(\tB\x17\xe0\x41\x02\xfa\x41\x11\n\x0fgrafeas.io/Note\x12#\n\x04note\x18\x02 \x01(\x0b\x32\x10.grafeas.v1.NoteB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"z\n\x1aListNoteOccurrencesRequest\x12%\n\x04name\x18\x01 \x01(\tB\x17\xe0\x41\x02\xfa\x41\x11\n\x0fgrafeas.io/Note\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"c\n\x1bListNoteOccurrencesResponse\x12+\n\x0boccurrences\x18\x01 \x03(\x0b\x32\x16.grafeas.v1.Occurrence\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xc9\x01\n\x17\x42\x61tchCreateNotesRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12\x42\n\x05notes\x18\x02 \x03(\x0b\x32..grafeas.v1.BatchCreateNotesRequest.NotesEntryB\x03\xe0\x41\x02\x1a>\n\nNotesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 \x01(\x0b\x32\x10.grafeas.v1.Note:\x02\x38\x01";\n\x18\x42\x61tchCreateNotesResponse\x12\x1f\n\x05notes\x18\x01 \x03(\x0b\x32\x10.grafeas.v1.Note"}\n\x1d\x42\x61tchCreateOccurrencesRequest\x12*\n\x06parent\x18\x01 \x01(\tB\x1a\xe0\x41\x02\xfa\x41\x14\n\x12grafeas.io/Project\x12\x30\n\x0boccurrences\x18\x02 \x03(\x0b\x32\x16.grafeas.v1.OccurrenceB\x03\xe0\x41\x02"M\n\x1e\x42\x61tchCreateOccurrencesResponse\x12+\n\x0boccurrences\x18\x01 \x03(\x0b\x32\x16.grafeas.v1.Occurrence2\x92\x10\n\x07Grafeas\x12}\n\rGetOccurrence\x12 
.grafeas.v1.GetOccurrenceRequest\x1a\x16.grafeas.v1.Occurrence"2\x82\xd3\xe4\x93\x02%\x12#/v1/{name=projects/*/occurrences/*}\xda\x41\x04name\x12\x97\x01\n\x0fListOccurrences\x12".grafeas.v1.ListOccurrencesRequest\x1a#.grafeas.v1.ListOccurrencesResponse";\x82\xd3\xe4\x93\x02%\x12#/v1/{parent=projects/*}/occurrences\xda\x41\rparent,filter\x12\x83\x01\n\x10\x44\x65leteOccurrence\x12#.grafeas.v1.DeleteOccurrenceRequest\x1a\x16.google.protobuf.Empty"2\x82\xd3\xe4\x93\x02%*#/v1/{name=projects/*/occurrences/*}\xda\x41\x04name\x12\x9c\x01\n\x10\x43reateOccurrence\x12#.grafeas.v1.CreateOccurrenceRequest\x1a\x16.grafeas.v1.Occurrence"K\x82\xd3\xe4\x93\x02\x31"#/v1/{parent=projects/*}/occurrences:\noccurrence\xda\x41\x11parent,occurrence\x12\xc0\x01\n\x16\x42\x61tchCreateOccurrences\x12).grafeas.v1.BatchCreateOccurrencesRequest\x1a*.grafeas.v1.BatchCreateOccurrencesResponse"O\x82\xd3\xe4\x93\x02\x34"//v1/{parent=projects/*}/occurrences:batchCreate:\x01*\xda\x41\x12parent,occurrences\x12\xa6\x01\n\x10UpdateOccurrence\x12#.grafeas.v1.UpdateOccurrenceRequest\x1a\x16.grafeas.v1.Occurrence"U\x82\xd3\xe4\x93\x02\x31\x32#/v1/{name=projects/*/occurrences/*}:\noccurrence\xda\x41\x1bname,occurrence,update_mask\x12\x85\x01\n\x11GetOccurrenceNote\x12$.grafeas.v1.GetOccurrenceNoteRequest\x1a\x10.grafeas.v1.Note"8\x82\xd3\xe4\x93\x02+\x12)/v1/{name=projects/*/occurrences/*}/notes\xda\x41\x04name\x12\x65\n\x07GetNote\x12\x1a.grafeas.v1.GetNoteRequest\x1a\x10.grafeas.v1.Note",\x82\xd3\xe4\x93\x02\x1f\x12\x1d/v1/{name=projects/*/notes/*}\xda\x41\x04name\x12\x7f\n\tListNotes\x12\x1c.grafeas.v1.ListNotesRequest\x1a\x1d.grafeas.v1.ListNotesResponse"5\x82\xd3\xe4\x93\x02\x1f\x12\x1d/v1/{parent=projects/*}/notes\xda\x41\rparent,filter\x12q\n\nDeleteNote\x12\x1d.grafeas.v1.DeleteNoteRequest\x1a\x16.google.protobuf.Empty",\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=projects/*/notes/*}\xda\x41\x04name\x12\x80\x01\n\nCreateNote\x12\x1d.grafeas.v1.CreateNoteRequest\x1a\x10.grafeas.v1.Note"A\x82\xd3\xe4\x93\x02%"\x1d/v1/{parent=projects/*}/notes:\x04note\xda\x41\x13parent,note_id,note\x12\xa2\x01\n\x10\x42\x61tchCreateNotes\x12#.grafeas.v1.BatchCreateNotesRequest\x1a$.grafeas.v1.BatchCreateNotesResponse"C\x82\xd3\xe4\x93\x02.")/v1/{parent=projects/*}/notes:batchCreate:\x01*\xda\x41\x0cparent,notes\x12\x82\x01\n\nUpdateNote\x12\x1d.grafeas.v1.UpdateNoteRequest\x1a\x10.grafeas.v1.Note"C\x82\xd3\xe4\x93\x02%2\x1d/v1/{name=projects/*/notes/*}:\x04note\xda\x41\x15name,note,update_mask\x12\xa7\x01\n\x13ListNoteOccurrences\x12&.grafeas.v1.ListNoteOccurrencesRequest\x1a\'.grafeas.v1.ListNoteOccurrencesResponse"?\x82\xd3\xe4\x93\x02+\x12)/v1/{name=projects/*/notes/*}/occurrences\xda\x41\x0bname,filter\x1a#\xca\x41 containeranalysis.googleapis.comB|\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRA\xea\x41(\n\x12grafeas.io/Project\x12\x12projects/{project}b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_attestation__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_build__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_deployment__pb2.DESCRIPTOR, - 
grafeas__v1_dot_proto_dot_discovery__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_image__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_package__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_upgrade__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_vulnerability__pb2.DESCRIPTOR, - ], -) - - -_OCCURRENCE = _descriptor.Descriptor( - name="Occurrence", - full_name="grafeas.v1.Occurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.Occurrence.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_uri", - full_name="grafeas.v1.Occurrence.resource_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="note_name", - full_name="grafeas.v1.Occurrence.note_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kind", - full_name="grafeas.v1.Occurrence.kind", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="remediation", - full_name="grafeas.v1.Occurrence.remediation", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="grafeas.v1.Occurrence.create_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="grafeas.v1.Occurrence.update_time", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="vulnerability", - full_name="grafeas.v1.Occurrence.vulnerability", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="build", - full_name="grafeas.v1.Occurrence.build", - index=8, - 
number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image", - full_name="grafeas.v1.Occurrence.image", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="package", - full_name="grafeas.v1.Occurrence.package", - index=10, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deployment", - full_name="grafeas.v1.Occurrence.deployment", - index=11, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="discovery", - full_name="grafeas.v1.Occurrence.discovery", - index=12, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="attestation", - full_name="grafeas.v1.Occurrence.attestation", - index=13, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="upgrade", - full_name="grafeas.v1.Occurrence.upgrade", - index=14, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352AD\n\025grafeas.io/Occurrence\022+projects/{project}/occurrences/{occurrence}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="details", - full_name="grafeas.v1.Occurrence.details", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=556, - serialized_end=1284, -) - - -_NOTE = _descriptor.Descriptor( - name="Note", - full_name="grafeas.v1.Note", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.Note.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="short_description", - full_name="grafeas.v1.Note.short_description", - index=1, - 
number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="long_description", - full_name="grafeas.v1.Note.long_description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kind", - full_name="grafeas.v1.Note.kind", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="related_url", - full_name="grafeas.v1.Note.related_url", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expiration_time", - full_name="grafeas.v1.Note.expiration_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="grafeas.v1.Note.create_time", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="grafeas.v1.Note.update_time", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="related_note_names", - full_name="grafeas.v1.Note.related_note_names", - index=8, - number=9, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="vulnerability", - full_name="grafeas.v1.Note.vulnerability", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="build", - full_name="grafeas.v1.Note.build", - index=10, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image", - full_name="grafeas.v1.Note.image", - index=11, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="package", - full_name="grafeas.v1.Note.package", - index=12, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deployment", - full_name="grafeas.v1.Note.deployment", - index=13, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="discovery", - full_name="grafeas.v1.Note.discovery", - index=14, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="attestation", - full_name="grafeas.v1.Note.attestation", - index=15, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="upgrade", - full_name="grafeas.v1.Note.upgrade", - index=16, - number=17, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352A2\n\017grafeas.io/Note\022\037projects/{project}/notes/{note}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="grafeas.v1.Note.type", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=1287, - serialized_end=2057, -) - - -_GETOCCURRENCEREQUEST = _descriptor.Descriptor( - name="GetOccurrenceRequest", - full_name="grafeas.v1.GetOccurrenceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.GetOccurrenceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\027\n\025grafeas.io/Occurrence"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2059, - serialized_end=2126, -) - - -_LISTOCCURRENCESREQUEST = _descriptor.Descriptor( - name="ListOccurrencesRequest", - 
full_name="grafeas.v1.ListOccurrencesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.ListOccurrencesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="grafeas.v1.ListOccurrencesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="grafeas.v1.ListOccurrencesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="grafeas.v1.ListOccurrencesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2128, - serialized_end=2251, -) - - -_LISTOCCURRENCESRESPONSE = _descriptor.Descriptor( - name="ListOccurrencesResponse", - full_name="grafeas.v1.ListOccurrencesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="occurrences", - full_name="grafeas.v1.ListOccurrencesResponse.occurrences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="grafeas.v1.ListOccurrencesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2253, - serialized_end=2348, -) - - -_DELETEOCCURRENCEREQUEST = _descriptor.Descriptor( - name="DeleteOccurrenceRequest", - full_name="grafeas.v1.DeleteOccurrenceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.DeleteOccurrenceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\027\n\025grafeas.io/Occurrence"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2350, - serialized_end=2420, -) - - -_CREATEOCCURRENCEREQUEST = _descriptor.Descriptor( - name="CreateOccurrenceRequest", - full_name="grafeas.v1.CreateOccurrenceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.CreateOccurrenceRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="occurrence", - full_name="grafeas.v1.CreateOccurrenceRequest.occurrence", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2422, - serialized_end=2540, -) - - -_UPDATEOCCURRENCEREQUEST = _descriptor.Descriptor( - name="UpdateOccurrenceRequest", - full_name="grafeas.v1.UpdateOccurrenceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.UpdateOccurrenceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\027\n\025grafeas.io/Occurrence"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="occurrence", - full_name="grafeas.v1.UpdateOccurrenceRequest.occurrence", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="grafeas.v1.UpdateOccurrenceRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2543, - serialized_end=2711, -) - - -_GETNOTEREQUEST = _descriptor.Descriptor( - name="GetNoteRequest", - full_name="grafeas.v1.GetNoteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.GetNoteRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\021\n\017grafeas.io/Note"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2713, - serialized_end=2768, -) - - -_GETOCCURRENCENOTEREQUEST = _descriptor.Descriptor( - name="GetOccurrenceNoteRequest", - full_name="grafeas.v1.GetOccurrenceNoteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.GetOccurrenceNoteRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\027\n\025grafeas.io/Occurrence"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2770, - serialized_end=2841, -) - - -_LISTNOTESREQUEST = _descriptor.Descriptor( - name="ListNotesRequest", - full_name="grafeas.v1.ListNotesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.ListNotesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="grafeas.v1.ListNotesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="grafeas.v1.ListNotesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="grafeas.v1.ListNotesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2843, - serialized_end=2960, -) - - -_LISTNOTESRESPONSE = _descriptor.Descriptor( - name="ListNotesResponse", - 
full_name="grafeas.v1.ListNotesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="notes", - full_name="grafeas.v1.ListNotesResponse.notes", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="grafeas.v1.ListNotesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2962, - serialized_end=3039, -) - - -_DELETENOTEREQUEST = _descriptor.Descriptor( - name="DeleteNoteRequest", - full_name="grafeas.v1.DeleteNoteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.DeleteNoteRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\021\n\017grafeas.io/Note"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3041, - serialized_end=3099, -) - - -_CREATENOTEREQUEST = _descriptor.Descriptor( - name="CreateNoteRequest", - full_name="grafeas.v1.CreateNoteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.CreateNoteRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="note_id", - full_name="grafeas.v1.CreateNoteRequest.note_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="note", - full_name="grafeas.v1.CreateNoteRequest.note", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3101, - serialized_end=3223, -) - - -_UPDATENOTEREQUEST = 
_descriptor.Descriptor( - name="UpdateNoteRequest", - full_name="grafeas.v1.UpdateNoteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.UpdateNoteRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\021\n\017grafeas.io/Note"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="note", - full_name="grafeas.v1.UpdateNoteRequest.note", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="grafeas.v1.UpdateNoteRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3226, - serialized_end=3370, -) - - -_LISTNOTEOCCURRENCESREQUEST = _descriptor.Descriptor( - name="ListNoteOccurrencesRequest", - full_name="grafeas.v1.ListNoteOccurrencesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.ListNoteOccurrencesRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\021\n\017grafeas.io/Note"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="grafeas.v1.ListNoteOccurrencesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="grafeas.v1.ListNoteOccurrencesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="grafeas.v1.ListNoteOccurrencesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=3372, - serialized_end=3494, -) - - -_LISTNOTEOCCURRENCESRESPONSE = _descriptor.Descriptor( - name="ListNoteOccurrencesResponse", - full_name="grafeas.v1.ListNoteOccurrencesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="occurrences", - full_name="grafeas.v1.ListNoteOccurrencesResponse.occurrences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="grafeas.v1.ListNoteOccurrencesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3496, - serialized_end=3595, -) - - -_BATCHCREATENOTESREQUEST_NOTESENTRY = _descriptor.Descriptor( - name="NotesEntry", - full_name="grafeas.v1.BatchCreateNotesRequest.NotesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="grafeas.v1.BatchCreateNotesRequest.NotesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="grafeas.v1.BatchCreateNotesRequest.NotesEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3737, - serialized_end=3799, -) - -_BATCHCREATENOTESREQUEST = _descriptor.Descriptor( - name="BatchCreateNotesRequest", - full_name="grafeas.v1.BatchCreateNotesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.BatchCreateNotesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="notes", - full_name="grafeas.v1.BatchCreateNotesRequest.notes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - 
nested_types=[_BATCHCREATENOTESREQUEST_NOTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3598, - serialized_end=3799, -) - - -_BATCHCREATENOTESRESPONSE = _descriptor.Descriptor( - name="BatchCreateNotesResponse", - full_name="grafeas.v1.BatchCreateNotesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="notes", - full_name="grafeas.v1.BatchCreateNotesResponse.notes", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3801, - serialized_end=3860, -) - - -_BATCHCREATEOCCURRENCESREQUEST = _descriptor.Descriptor( - name="BatchCreateOccurrencesRequest", - full_name="grafeas.v1.BatchCreateOccurrencesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="grafeas.v1.BatchCreateOccurrencesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002\372A\024\n\022grafeas.io/Project"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="occurrences", - full_name="grafeas.v1.BatchCreateOccurrencesRequest.occurrences", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3862, - serialized_end=3987, -) - - -_BATCHCREATEOCCURRENCESRESPONSE = _descriptor.Descriptor( - name="BatchCreateOccurrencesResponse", - full_name="grafeas.v1.BatchCreateOccurrencesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="occurrences", - full_name="grafeas.v1.BatchCreateOccurrencesResponse.occurrences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3989, - serialized_end=4066, -) - -_OCCURRENCE.fields_by_name[ - "kind" -].enum_type = grafeas__v1_dot_proto_dot_common__pb2._NOTEKIND -_OCCURRENCE.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_OCCURRENCE.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_OCCURRENCE.fields_by_name[ - "vulnerability" -].message_type = 
grafeas__v1_dot_proto_dot_vulnerability__pb2._VULNERABILITYOCCURRENCE -_OCCURRENCE.fields_by_name[ - "build" -].message_type = grafeas__v1_dot_proto_dot_build__pb2._BUILDOCCURRENCE -_OCCURRENCE.fields_by_name[ - "image" -].message_type = grafeas__v1_dot_proto_dot_image__pb2._IMAGEOCCURRENCE -_OCCURRENCE.fields_by_name[ - "package" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._PACKAGEOCCURRENCE -_OCCURRENCE.fields_by_name[ - "deployment" -].message_type = grafeas__v1_dot_proto_dot_deployment__pb2._DEPLOYMENTOCCURRENCE -_OCCURRENCE.fields_by_name[ - "discovery" -].message_type = grafeas__v1_dot_proto_dot_discovery__pb2._DISCOVERYOCCURRENCE -_OCCURRENCE.fields_by_name[ - "attestation" -].message_type = grafeas__v1_dot_proto_dot_attestation__pb2._ATTESTATIONOCCURRENCE -_OCCURRENCE.fields_by_name[ - "upgrade" -].message_type = grafeas__v1_dot_proto_dot_upgrade__pb2._UPGRADEOCCURRENCE -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["vulnerability"] -) -_OCCURRENCE.fields_by_name[ - "vulnerability" -].containing_oneof = _OCCURRENCE.oneofs_by_name["details"] -_OCCURRENCE.oneofs_by_name["details"].fields.append(_OCCURRENCE.fields_by_name["build"]) -_OCCURRENCE.fields_by_name["build"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append(_OCCURRENCE.fields_by_name["image"]) -_OCCURRENCE.fields_by_name["image"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["package"] -) -_OCCURRENCE.fields_by_name["package"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["deployment"] -) -_OCCURRENCE.fields_by_name["deployment"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["discovery"] -) -_OCCURRENCE.fields_by_name["discovery"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["attestation"] -) -_OCCURRENCE.fields_by_name["attestation"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_OCCURRENCE.oneofs_by_name["details"].fields.append( - _OCCURRENCE.fields_by_name["upgrade"] -) -_OCCURRENCE.fields_by_name["upgrade"].containing_oneof = _OCCURRENCE.oneofs_by_name[ - "details" -] -_NOTE.fields_by_name["kind"].enum_type = grafeas__v1_dot_proto_dot_common__pb2._NOTEKIND -_NOTE.fields_by_name[ - "related_url" -].message_type = grafeas__v1_dot_proto_dot_common__pb2._RELATEDURL -_NOTE.fields_by_name[ - "expiration_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_NOTE.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_NOTE.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_NOTE.fields_by_name[ - "vulnerability" -].message_type = grafeas__v1_dot_proto_dot_vulnerability__pb2._VULNERABILITYNOTE -_NOTE.fields_by_name[ - "build" -].message_type = grafeas__v1_dot_proto_dot_build__pb2._BUILDNOTE -_NOTE.fields_by_name[ - "image" -].message_type = grafeas__v1_dot_proto_dot_image__pb2._IMAGENOTE -_NOTE.fields_by_name[ - "package" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._PACKAGENOTE -_NOTE.fields_by_name[ - "deployment" -].message_type = 
grafeas__v1_dot_proto_dot_deployment__pb2._DEPLOYMENTNOTE -_NOTE.fields_by_name[ - "discovery" -].message_type = grafeas__v1_dot_proto_dot_discovery__pb2._DISCOVERYNOTE -_NOTE.fields_by_name[ - "attestation" -].message_type = grafeas__v1_dot_proto_dot_attestation__pb2._ATTESTATIONNOTE -_NOTE.fields_by_name[ - "upgrade" -].message_type = grafeas__v1_dot_proto_dot_upgrade__pb2._UPGRADENOTE -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["vulnerability"]) -_NOTE.fields_by_name["vulnerability"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["build"]) -_NOTE.fields_by_name["build"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["image"]) -_NOTE.fields_by_name["image"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["package"]) -_NOTE.fields_by_name["package"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["deployment"]) -_NOTE.fields_by_name["deployment"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["discovery"]) -_NOTE.fields_by_name["discovery"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["attestation"]) -_NOTE.fields_by_name["attestation"].containing_oneof = _NOTE.oneofs_by_name["type"] -_NOTE.oneofs_by_name["type"].fields.append(_NOTE.fields_by_name["upgrade"]) -_NOTE.fields_by_name["upgrade"].containing_oneof = _NOTE.oneofs_by_name["type"] -_LISTOCCURRENCESRESPONSE.fields_by_name["occurrences"].message_type = _OCCURRENCE -_CREATEOCCURRENCEREQUEST.fields_by_name["occurrence"].message_type = _OCCURRENCE -_UPDATEOCCURRENCEREQUEST.fields_by_name["occurrence"].message_type = _OCCURRENCE -_UPDATEOCCURRENCEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTNOTESRESPONSE.fields_by_name["notes"].message_type = _NOTE -_CREATENOTEREQUEST.fields_by_name["note"].message_type = _NOTE -_UPDATENOTEREQUEST.fields_by_name["note"].message_type = _NOTE -_UPDATENOTEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTNOTEOCCURRENCESRESPONSE.fields_by_name["occurrences"].message_type = _OCCURRENCE -_BATCHCREATENOTESREQUEST_NOTESENTRY.fields_by_name["value"].message_type = _NOTE -_BATCHCREATENOTESREQUEST_NOTESENTRY.containing_type = _BATCHCREATENOTESREQUEST -_BATCHCREATENOTESREQUEST.fields_by_name[ - "notes" -].message_type = _BATCHCREATENOTESREQUEST_NOTESENTRY -_BATCHCREATENOTESRESPONSE.fields_by_name["notes"].message_type = _NOTE -_BATCHCREATEOCCURRENCESREQUEST.fields_by_name["occurrences"].message_type = _OCCURRENCE -_BATCHCREATEOCCURRENCESRESPONSE.fields_by_name["occurrences"].message_type = _OCCURRENCE -DESCRIPTOR.message_types_by_name["Occurrence"] = _OCCURRENCE -DESCRIPTOR.message_types_by_name["Note"] = _NOTE -DESCRIPTOR.message_types_by_name["GetOccurrenceRequest"] = _GETOCCURRENCEREQUEST -DESCRIPTOR.message_types_by_name["ListOccurrencesRequest"] = _LISTOCCURRENCESREQUEST -DESCRIPTOR.message_types_by_name["ListOccurrencesResponse"] = _LISTOCCURRENCESRESPONSE -DESCRIPTOR.message_types_by_name["DeleteOccurrenceRequest"] = _DELETEOCCURRENCEREQUEST -DESCRIPTOR.message_types_by_name["CreateOccurrenceRequest"] = _CREATEOCCURRENCEREQUEST 
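-# A minimal, hypothetical usage sketch (not part of the generated file) of the
-# oneof wiring above: each kind-specific payload field is attached to the
-# `details` oneof of Occurrence (and `type` oneof of Note), so setting one
-# branch clears the others. Assumes the module removed by this patch were
-# still importable as grafeas.grafeas_v1.proto.grafeas_pb2:
-#
-#     from grafeas.grafeas_v1.proto import grafeas_pb2
-#
-#     occurrence = grafeas_pb2.Occurrence(
-#         resource_uri="https://gcr.io/project/image@sha256:123abc",  # example URI from the docstrings
-#         note_name="projects/provider-project/notes/example-note",   # hypothetical note name
-#     )
-#     occurrence.vulnerability.SetInParent()  # select the `vulnerability` branch of `details`
-#     assert occurrence.WhichOneof("details") == "vulnerability"
-#     occurrence.build.SetInParent()          # selecting `build` clears `vulnerability`
-#     assert occurrence.WhichOneof("details") == "build"
-#     assert not occurrence.HasField("vulnerability")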
-DESCRIPTOR.message_types_by_name["UpdateOccurrenceRequest"] = _UPDATEOCCURRENCEREQUEST -DESCRIPTOR.message_types_by_name["GetNoteRequest"] = _GETNOTEREQUEST -DESCRIPTOR.message_types_by_name["GetOccurrenceNoteRequest"] = _GETOCCURRENCENOTEREQUEST -DESCRIPTOR.message_types_by_name["ListNotesRequest"] = _LISTNOTESREQUEST -DESCRIPTOR.message_types_by_name["ListNotesResponse"] = _LISTNOTESRESPONSE -DESCRIPTOR.message_types_by_name["DeleteNoteRequest"] = _DELETENOTEREQUEST -DESCRIPTOR.message_types_by_name["CreateNoteRequest"] = _CREATENOTEREQUEST -DESCRIPTOR.message_types_by_name["UpdateNoteRequest"] = _UPDATENOTEREQUEST -DESCRIPTOR.message_types_by_name[ - "ListNoteOccurrencesRequest" -] = _LISTNOTEOCCURRENCESREQUEST -DESCRIPTOR.message_types_by_name[ - "ListNoteOccurrencesResponse" -] = _LISTNOTEOCCURRENCESRESPONSE -DESCRIPTOR.message_types_by_name["BatchCreateNotesRequest"] = _BATCHCREATENOTESREQUEST -DESCRIPTOR.message_types_by_name["BatchCreateNotesResponse"] = _BATCHCREATENOTESRESPONSE -DESCRIPTOR.message_types_by_name[ - "BatchCreateOccurrencesRequest" -] = _BATCHCREATEOCCURRENCESREQUEST -DESCRIPTOR.message_types_by_name[ - "BatchCreateOccurrencesResponse" -] = _BATCHCREATEOCCURRENCESRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Occurrence = _reflection.GeneratedProtocolMessageType( - "Occurrence", - (_message.Message,), - dict( - DESCRIPTOR=_OCCURRENCE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""An instance of an analysis type that has been found on a - resource. - - - Attributes: - name: - Output only. The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - resource_uri: - Required. Immutable. A URI that represents the resource for - which the occurrence applies. For example, - ``https://gcr.io/project/image@sha256:123abc`` for a Docker - image. - note_name: - Required. Immutable. The analysis note associated with this - occurrence, in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. This field can be - used as a filter in list requests. - kind: - Output only. This explicitly denotes which of the occurrence - details are specified. This field can be used as a filter in - list requests. - remediation: - A description of actions that can be taken to remedy the note. - create_time: - Output only. The time this occurrence was created. - update_time: - Output only. The time this occurrence was last updated. - details: - Required. Immutable. Describes the details of the note kind - found on this resource. - vulnerability: - Describes a security vulnerability. - build: - Describes a verifiable build. - image: - Describes how this resource derives from the basis in the - associated note. - package: - Describes the installation of a package on the linked - resource. - deployment: - Describes the deployment of an artifact on a runtime. - discovery: - Describes when a resource was discovered. - attestation: - Describes an attestation of an artifact. - upgrade: - Describes an available package upgrade on the linked resource. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Occurrence) - ), -) -_sym_db.RegisterMessage(Occurrence) - -Note = _reflection.GeneratedProtocolMessageType( - "Note", - (_message.Message,), - dict( - DESCRIPTOR=_NOTE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""A type of analysis that can be done for a resource. - - - Attributes: - name: - Output only. The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. 
- short_description: - A one sentence description of this note. - long_description: - A detailed description of this note. - kind: - Output only. The type of analysis. This field can be used as a - filter in list requests. - related_url: - URLs associated with this note. - expiration_time: - Time of expiration for this note. Empty if note does not - expire. - create_time: - Output only. The time this note was created. This field can be - used as a filter in list requests. - update_time: - Output only. The time this note was last updated. This field - can be used as a filter in list requests. - related_note_names: - Other notes related to this note. - type: - Required. Immutable. The type of analysis this note - represents. - vulnerability: - A note describing a package vulnerability. - build: - A note describing build provenance for a verifiable build. - image: - A note describing a base image. - package: - A note describing a package hosted by various package - managers. - deployment: - A note describing something that can be deployed. - discovery: - A note describing the initial analysis of a resource. - attestation: - A note describing an attestation role. - upgrade: - A note describing available package upgrades. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Note) - ), -) -_sym_db.RegisterMessage(Note) - -GetOccurrenceRequest = _reflection.GeneratedProtocolMessageType( - "GetOccurrenceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETOCCURRENCEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to get an occurrence. - - - Attributes: - name: - The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.GetOccurrenceRequest) - ), -) -_sym_db.RegisterMessage(GetOccurrenceRequest) - -ListOccurrencesRequest = _reflection.GeneratedProtocolMessageType( - "ListOccurrencesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTOCCURRENCESREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to list occurrences. - - - Attributes: - parent: - The name of the project to list occurrences for in the form of - ``projects/[PROJECT_ID]``. - filter: - The filter expression. - page_size: - Number of occurrences to return in the list. Must be positive. - Max allowed page size is 1000. If not specified, page size - defaults to 20. - page_token: - Token to provide to skip to a particular spot in the list. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListOccurrencesRequest) - ), -) -_sym_db.RegisterMessage(ListOccurrencesRequest) - -ListOccurrencesResponse = _reflection.GeneratedProtocolMessageType( - "ListOccurrencesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTOCCURRENCESRESPONSE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Response for listing occurrences. - - - Attributes: - occurrences: - The occurrences requested. - next_page_token: - The next pagination token in the list response. It should be - used as ``page_token`` for the following request. An empty - value means no more results. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListOccurrencesResponse) - ), -) -_sym_db.RegisterMessage(ListOccurrencesResponse) - -DeleteOccurrenceRequest = _reflection.GeneratedProtocolMessageType( - "DeleteOccurrenceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEOCCURRENCEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to delete an occurrence. 
- - - Attributes: - name: - The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DeleteOccurrenceRequest) - ), -) -_sym_db.RegisterMessage(DeleteOccurrenceRequest) - -CreateOccurrenceRequest = _reflection.GeneratedProtocolMessageType( - "CreateOccurrenceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEOCCURRENCEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to create a new occurrence. - - - Attributes: - parent: - The name of the project in the form of - ``projects/[PROJECT_ID]``, under which the occurrence is to be - created. - occurrence: - The occurrence to create. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.CreateOccurrenceRequest) - ), -) -_sym_db.RegisterMessage(CreateOccurrenceRequest) - -UpdateOccurrenceRequest = _reflection.GeneratedProtocolMessageType( - "UpdateOccurrenceRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEOCCURRENCEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to update an occurrence. - - - Attributes: - name: - The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - occurrence: - The updated occurrence. - update_mask: - The fields to update. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.UpdateOccurrenceRequest) - ), -) -_sym_db.RegisterMessage(UpdateOccurrenceRequest) - -GetNoteRequest = _reflection.GeneratedProtocolMessageType( - "GetNoteRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETNOTEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to get a note. - - - Attributes: - name: - The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.GetNoteRequest) - ), -) -_sym_db.RegisterMessage(GetNoteRequest) - -GetOccurrenceNoteRequest = _reflection.GeneratedProtocolMessageType( - "GetOccurrenceNoteRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETOCCURRENCENOTEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to get the note to which the specified occurrence - is attached. - - - Attributes: - name: - The name of the occurrence in the form of - ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.GetOccurrenceNoteRequest) - ), -) -_sym_db.RegisterMessage(GetOccurrenceNoteRequest) - -ListNotesRequest = _reflection.GeneratedProtocolMessageType( - "ListNotesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTESREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to list notes. - - - Attributes: - parent: - The name of the project to list notes for in the form of - ``projects/[PROJECT_ID]``. - filter: - The filter expression. - page_size: - Number of notes to return in the list. Must be positive. Max - allowed page size is 1000. If not specified, page size - defaults to 20. - page_token: - Token to provide to skip to a particular spot in the list. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListNotesRequest) - ), -) -_sym_db.RegisterMessage(ListNotesRequest) - -ListNotesResponse = _reflection.GeneratedProtocolMessageType( - "ListNotesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTESRESPONSE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Response for listing notes. - - - Attributes: - notes: - The notes requested. 
- next_page_token: - The next pagination token in the list response. It should be - used as ``page_token`` for the following request. An empty - value means no more results. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListNotesResponse) - ), -) -_sym_db.RegisterMessage(ListNotesResponse) - -DeleteNoteRequest = _reflection.GeneratedProtocolMessageType( - "DeleteNoteRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETENOTEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to delete a note. - - - Attributes: - name: - The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.DeleteNoteRequest) - ), -) -_sym_db.RegisterMessage(DeleteNoteRequest) - -CreateNoteRequest = _reflection.GeneratedProtocolMessageType( - "CreateNoteRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATENOTEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to create a new note. - - - Attributes: - parent: - The name of the project in the form of - ``projects/[PROJECT_ID]``, under which the note is to be - created. - note_id: - The ID to use for this note. - note: - The note to create. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.CreateNoteRequest) - ), -) -_sym_db.RegisterMessage(CreateNoteRequest) - -UpdateNoteRequest = _reflection.GeneratedProtocolMessageType( - "UpdateNoteRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATENOTEREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to update a note. - - - Attributes: - name: - The name of the note in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - note: - The updated note. - update_mask: - The fields to update. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.UpdateNoteRequest) - ), -) -_sym_db.RegisterMessage(UpdateNoteRequest) - -ListNoteOccurrencesRequest = _reflection.GeneratedProtocolMessageType( - "ListNoteOccurrencesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTEOCCURRENCESREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to list occurrences for a note. - - - Attributes: - name: - The name of the note to list occurrences for in the form of - ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. - filter: - The filter expression. - page_size: - Number of occurrences to return in the list. - page_token: - Token to provide to skip to a particular spot in the list. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListNoteOccurrencesRequest) - ), -) -_sym_db.RegisterMessage(ListNoteOccurrencesRequest) - -ListNoteOccurrencesResponse = _reflection.GeneratedProtocolMessageType( - "ListNoteOccurrencesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTNOTEOCCURRENCESRESPONSE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Response for listing occurrences for a note. - - - Attributes: - occurrences: - The occurrences attached to the specified note. - next_page_token: - Token to provide to skip to a particular spot in the list. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ListNoteOccurrencesResponse) - ), -) -_sym_db.RegisterMessage(ListNoteOccurrencesResponse) - -BatchCreateNotesRequest = _reflection.GeneratedProtocolMessageType( - "BatchCreateNotesRequest", - (_message.Message,), - dict( - NotesEntry=_reflection.GeneratedProtocolMessageType( - "NotesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATENOTESREQUEST_NOTESENTRY, - __module__="grafeas_v1.proto.grafeas_pb2" - # @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateNotesRequest.NotesEntry) - ), - ), - DESCRIPTOR=_BATCHCREATENOTESREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to create notes in batch. - - - Attributes: - parent: - The name of the project in the form of - ``projects/[PROJECT_ID]``, under which the notes are to be - created. - notes: - The notes to create. Max allowed length is 1000. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateNotesRequest) - ), -) -_sym_db.RegisterMessage(BatchCreateNotesRequest) -_sym_db.RegisterMessage(BatchCreateNotesRequest.NotesEntry) - -BatchCreateNotesResponse = _reflection.GeneratedProtocolMessageType( - "BatchCreateNotesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATENOTESRESPONSE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Response for creating notes in batch. - - - Attributes: - notes: - The notes that were created. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateNotesResponse) - ), -) -_sym_db.RegisterMessage(BatchCreateNotesResponse) - -BatchCreateOccurrencesRequest = _reflection.GeneratedProtocolMessageType( - "BatchCreateOccurrencesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATEOCCURRENCESREQUEST, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Request to create occurrences in batch. - - - Attributes: - parent: - The name of the project in the form of - ``projects/[PROJECT_ID]``, under which the occurrences are to - be created. - occurrences: - The occurrences to create. Max allowed length is 1000. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateOccurrencesRequest) - ), -) -_sym_db.RegisterMessage(BatchCreateOccurrencesRequest) - -BatchCreateOccurrencesResponse = _reflection.GeneratedProtocolMessageType( - "BatchCreateOccurrencesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_BATCHCREATEOCCURRENCESRESPONSE, - __module__="grafeas_v1.proto.grafeas_pb2", - __doc__="""Response for creating occurrences in batch. - - - Attributes: - occurrences: - The occurrences that were created. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BatchCreateOccurrencesResponse) - ), -) -_sym_db.RegisterMessage(BatchCreateOccurrencesResponse) - - -DESCRIPTOR._options = None -_OCCURRENCE._options = None -_NOTE._options = None -_GETOCCURRENCEREQUEST.fields_by_name["name"]._options = None -_LISTOCCURRENCESREQUEST.fields_by_name["parent"]._options = None -_DELETEOCCURRENCEREQUEST.fields_by_name["name"]._options = None -_CREATEOCCURRENCEREQUEST.fields_by_name["parent"]._options = None -_CREATEOCCURRENCEREQUEST.fields_by_name["occurrence"]._options = None -_UPDATEOCCURRENCEREQUEST.fields_by_name["name"]._options = None -_UPDATEOCCURRENCEREQUEST.fields_by_name["occurrence"]._options = None -_GETNOTEREQUEST.fields_by_name["name"]._options = None -_GETOCCURRENCENOTEREQUEST.fields_by_name["name"]._options = None -_LISTNOTESREQUEST.fields_by_name["parent"]._options = None -_DELETENOTEREQUEST.fields_by_name["name"]._options = None -_CREATENOTEREQUEST.fields_by_name["parent"]._options = None -_CREATENOTEREQUEST.fields_by_name["note_id"]._options = None -_CREATENOTEREQUEST.fields_by_name["note"]._options = None -_UPDATENOTEREQUEST.fields_by_name["name"]._options = None -_UPDATENOTEREQUEST.fields_by_name["note"]._options = None -_LISTNOTEOCCURRENCESREQUEST.fields_by_name["name"]._options = None -_BATCHCREATENOTESREQUEST_NOTESENTRY._options = None -_BATCHCREATENOTESREQUEST.fields_by_name["parent"]._options = None -_BATCHCREATENOTESREQUEST.fields_by_name["notes"]._options = None -_BATCHCREATEOCCURRENCESREQUEST.fields_by_name["parent"]._options = None -_BATCHCREATEOCCURRENCESREQUEST.fields_by_name["occurrences"]._options = None - -_GRAFEAS = _descriptor.ServiceDescriptor( - name="Grafeas", - full_name="grafeas.v1.Grafeas", - file=DESCRIPTOR, - index=0, - serialized_options=_b("\312A containeranalysis.googleapis.com"), - serialized_start=4069, - serialized_end=6135, - methods=[ - _descriptor.MethodDescriptor( - name="GetOccurrence", - full_name="grafeas.v1.Grafeas.GetOccurrence", - index=0, - containing_service=None, - input_type=_GETOCCURRENCEREQUEST, - output_type=_OCCURRENCE, - serialized_options=_b( - "\202\323\344\223\002%\022#/v1/{name=projects/*/occurrences/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="ListOccurrences", - full_name="grafeas.v1.Grafeas.ListOccurrences", - index=1, - containing_service=None, - input_type=_LISTOCCURRENCESREQUEST, - output_type=_LISTOCCURRENCESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002%\022#/v1/{parent=projects/*}/occurrences\332A\rparent,filter" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteOccurrence", - full_name="grafeas.v1.Grafeas.DeleteOccurrence", - index=2, - containing_service=None, - input_type=_DELETEOCCURRENCEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002%*#/v1/{name=projects/*/occurrences/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="CreateOccurrence", - full_name="grafeas.v1.Grafeas.CreateOccurrence", - index=3, - containing_service=None, - input_type=_CREATEOCCURRENCEREQUEST, - output_type=_OCCURRENCE, - serialized_options=_b( - '\202\323\344\223\0021"#/v1/{parent=projects/*}/occurrences:\noccurrence\332A\021parent,occurrence' - ), - ), - _descriptor.MethodDescriptor( - name="BatchCreateOccurrences", - full_name="grafeas.v1.Grafeas.BatchCreateOccurrences", - index=4, - containing_service=None, - input_type=_BATCHCREATEOCCURRENCESREQUEST, - output_type=_BATCHCREATEOCCURRENCESRESPONSE, - serialized_options=_b( 
- '\202\323\344\223\0024"//v1/{parent=projects/*}/occurrences:batchCreate:\001*\332A\022parent,occurrences' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateOccurrence", - full_name="grafeas.v1.Grafeas.UpdateOccurrence", - index=5, - containing_service=None, - input_type=_UPDATEOCCURRENCEREQUEST, - output_type=_OCCURRENCE, - serialized_options=_b( - "\202\323\344\223\00212#/v1/{name=projects/*/occurrences/*}:\noccurrence\332A\033name,occurrence,update_mask" - ), - ), - _descriptor.MethodDescriptor( - name="GetOccurrenceNote", - full_name="grafeas.v1.Grafeas.GetOccurrenceNote", - index=6, - containing_service=None, - input_type=_GETOCCURRENCENOTEREQUEST, - output_type=_NOTE, - serialized_options=_b( - "\202\323\344\223\002+\022)/v1/{name=projects/*/occurrences/*}/notes\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="GetNote", - full_name="grafeas.v1.Grafeas.GetNote", - index=7, - containing_service=None, - input_type=_GETNOTEREQUEST, - output_type=_NOTE, - serialized_options=_b( - "\202\323\344\223\002\037\022\035/v1/{name=projects/*/notes/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="ListNotes", - full_name="grafeas.v1.Grafeas.ListNotes", - index=8, - containing_service=None, - input_type=_LISTNOTESREQUEST, - output_type=_LISTNOTESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\037\022\035/v1/{parent=projects/*}/notes\332A\rparent,filter" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteNote", - full_name="grafeas.v1.Grafeas.DeleteNote", - index=9, - containing_service=None, - input_type=_DELETENOTEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\037*\035/v1/{name=projects/*/notes/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="CreateNote", - full_name="grafeas.v1.Grafeas.CreateNote", - index=10, - containing_service=None, - input_type=_CREATENOTEREQUEST, - output_type=_NOTE, - serialized_options=_b( - '\202\323\344\223\002%"\035/v1/{parent=projects/*}/notes:\004note\332A\023parent,note_id,note' - ), - ), - _descriptor.MethodDescriptor( - name="BatchCreateNotes", - full_name="grafeas.v1.Grafeas.BatchCreateNotes", - index=11, - containing_service=None, - input_type=_BATCHCREATENOTESREQUEST, - output_type=_BATCHCREATENOTESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002.")/v1/{parent=projects/*}/notes:batchCreate:\001*\332A\014parent,notes' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateNote", - full_name="grafeas.v1.Grafeas.UpdateNote", - index=12, - containing_service=None, - input_type=_UPDATENOTEREQUEST, - output_type=_NOTE, - serialized_options=_b( - "\202\323\344\223\002%2\035/v1/{name=projects/*/notes/*}:\004note\332A\025name,note,update_mask" - ), - ), - _descriptor.MethodDescriptor( - name="ListNoteOccurrences", - full_name="grafeas.v1.Grafeas.ListNoteOccurrences", - index=13, - containing_service=None, - input_type=_LISTNOTEOCCURRENCESREQUEST, - output_type=_LISTNOTEOCCURRENCESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002+\022)/v1/{name=projects/*/notes/*}/occurrences\332A\013name,filter" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_GRAFEAS) - -DESCRIPTOR.services_by_name["Grafeas"] = _GRAFEAS - -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py deleted file mode 100644 index 70057e822c51..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py +++ /dev/null @@ -1,302 
+0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from grafeas.grafeas_v1.proto import (
-    grafeas_pb2 as grafeas__v1_dot_proto_dot_grafeas__pb2,
-)
-
-
-class GrafeasStub(object):
-    """[Grafeas](https://grafeas.io) API.
-
-    Retrieves analysis results of Cloud components such as Docker container
-    images.
-
-    Analysis results are stored as a series of occurrences. An `Occurrence`
-    contains information about a specific analysis instance on a resource. An
-    occurrence refers to a `Note`. A note contains details describing the
-    analysis and is generally stored in a separate project, called a `Provider`.
-    Multiple occurrences can refer to the same note.
-
-    For example, an SSL vulnerability could affect multiple images. In this case,
-    there would be one note for the vulnerability and an occurrence for each
-    image with the vulnerability referring to that note.
-    """
-
-    def __init__(self, channel):
-        """Constructor.
-
-        Args:
-            channel: A grpc.Channel.
-        """
-        self.GetOccurrence = channel.unary_unary(
-            "/grafeas.v1.Grafeas/GetOccurrence",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString,
-        )
-        self.ListOccurrences = channel.unary_unary(
-            "/grafeas.v1.Grafeas/ListOccurrences",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesResponse.FromString,
-        )
-        self.DeleteOccurrence = channel.unary_unary(
-            "/grafeas.v1.Grafeas/DeleteOccurrence",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteOccurrenceRequest.SerializeToString,
-            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
-        )
-        self.CreateOccurrence = channel.unary_unary(
-            "/grafeas.v1.Grafeas/CreateOccurrence",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.CreateOccurrenceRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString,
-        )
-        self.BatchCreateOccurrences = channel.unary_unary(
-            "/grafeas.v1.Grafeas/BatchCreateOccurrences",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesResponse.FromString,
-        )
-        self.UpdateOccurrence = channel.unary_unary(
-            "/grafeas.v1.Grafeas/UpdateOccurrence",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateOccurrenceRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString,
-        )
-        self.GetOccurrenceNote = channel.unary_unary(
-            "/grafeas.v1.Grafeas/GetOccurrenceNote",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceNoteRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString,
-        )
-        self.GetNote = channel.unary_unary(
-            "/grafeas.v1.Grafeas/GetNote",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetNoteRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString,
-        )
-        self.ListNotes = channel.unary_unary(
-            "/grafeas.v1.Grafeas/ListNotes",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesResponse.FromString,
-        )
-        self.DeleteNote = channel.unary_unary(
-            "/grafeas.v1.Grafeas/DeleteNote",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteNoteRequest.SerializeToString,
-            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
-        )
-        self.CreateNote = channel.unary_unary(
-            "/grafeas.v1.Grafeas/CreateNote",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.CreateNoteRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString,
-        )
-        self.BatchCreateNotes = channel.unary_unary(
-            "/grafeas.v1.Grafeas/BatchCreateNotes",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesResponse.FromString,
-        )
-        self.UpdateNote = channel.unary_unary(
-            "/grafeas.v1.Grafeas/UpdateNote",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateNoteRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString,
-        )
-        self.ListNoteOccurrences = channel.unary_unary(
-            "/grafeas.v1.Grafeas/ListNoteOccurrences",
-            request_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesRequest.SerializeToString,
-            response_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesResponse.FromString,
-        )
-
-
-class GrafeasServicer(object):
-    """[Grafeas](https://grafeas.io) API.
-
-    Retrieves analysis results of Cloud components such as Docker container
-    images.
-
-    Analysis results are stored as a series of occurrences. An `Occurrence`
-    contains information about a specific analysis instance on a resource. An
-    occurrence refers to a `Note`. A note contains details describing the
-    analysis and is generally stored in a separate project, called a `Provider`.
-    Multiple occurrences can refer to the same note.
-
-    For example, an SSL vulnerability could affect multiple images. In this case,
-    there would be one note for the vulnerability and an occurrence for each
-    image with the vulnerability referring to that note.
-    """
-
-    def GetOccurrence(self, request, context):
-        """Gets the specified occurrence.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def ListOccurrences(self, request, context):
-        """Lists occurrences for the specified project.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def DeleteOccurrence(self, request, context):
-        """Deletes the specified occurrence. For example, use this method to delete an
-        occurrence when the occurrence is no longer applicable for the given
-        resource.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def CreateOccurrence(self, request, context):
-        """Creates a new occurrence.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def BatchCreateOccurrences(self, request, context):
-        """Creates new occurrences in batch.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def UpdateOccurrence(self, request, context):
-        """Updates the specified occurrence.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def GetOccurrenceNote(self, request, context):
-        """Gets the note attached to the specified occurrence. Consumer projects can
-        use this method to get a note that belongs to a provider project.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def GetNote(self, request, context):
-        """Gets the specified note.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def ListNotes(self, request, context):
-        """Lists notes for the specified project.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def DeleteNote(self, request, context):
-        """Deletes the specified note.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def CreateNote(self, request, context):
-        """Creates a new note.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def BatchCreateNotes(self, request, context):
-        """Creates new notes in batch.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def UpdateNote(self, request, context):
-        """Updates the specified note.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-    def ListNoteOccurrences(self, request, context):
-        """Lists occurrences referencing the specified note. Provider projects can use
-        this method to get all occurrences across consumer projects referencing the
-        specified note.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details("Method not implemented!")
-        raise NotImplementedError("Method not implemented!")
-
-
-def add_GrafeasServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-        "GetOccurrence": grpc.unary_unary_rpc_method_handler(
-            servicer.GetOccurrence,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString,
-        ),
-        "ListOccurrences": grpc.unary_unary_rpc_method_handler(
-            servicer.ListOccurrences,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesResponse.SerializeToString,
-        ),
-        "DeleteOccurrence": grpc.unary_unary_rpc_method_handler(
-            servicer.DeleteOccurrence,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteOccurrenceRequest.FromString,
-            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
-        ),
-        "CreateOccurrence": grpc.unary_unary_rpc_method_handler(
-            servicer.CreateOccurrence,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.CreateOccurrenceRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString,
-        ),
-        "BatchCreateOccurrences": grpc.unary_unary_rpc_method_handler(
-            servicer.BatchCreateOccurrences,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesResponse.SerializeToString,
-        ),
-        "UpdateOccurrence": grpc.unary_unary_rpc_method_handler(
-            servicer.UpdateOccurrence,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateOccurrenceRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString,
-        ),
-        "GetOccurrenceNote": grpc.unary_unary_rpc_method_handler(
-            servicer.GetOccurrenceNote,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceNoteRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString,
-        ),
-        "GetNote": grpc.unary_unary_rpc_method_handler(
-            servicer.GetNote,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.GetNoteRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString,
-        ),
-        "ListNotes": grpc.unary_unary_rpc_method_handler(
-            servicer.ListNotes,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesResponse.SerializeToString,
-        ),
-        "DeleteNote": grpc.unary_unary_rpc_method_handler(
-            servicer.DeleteNote,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteNoteRequest.FromString,
-            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
-        ),
-        "CreateNote": grpc.unary_unary_rpc_method_handler(
-            servicer.CreateNote,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.CreateNoteRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString,
-        ),
-        "BatchCreateNotes": grpc.unary_unary_rpc_method_handler(
-            servicer.BatchCreateNotes,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesResponse.SerializeToString,
-        ),
-        "UpdateNote": grpc.unary_unary_rpc_method_handler(
-            servicer.UpdateNote,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateNoteRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString,
-        ),
-        "ListNoteOccurrences": grpc.unary_unary_rpc_method_handler(
-            servicer.ListNoteOccurrences,
-            request_deserializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesRequest.FromString,
-            response_serializer=grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesResponse.SerializeToString,
-        ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-        "grafeas.v1.Grafeas", rpc_method_handlers
-    )
-    server.add_generic_rpc_handlers((generic_handler,))
diff --git a/grafeas/grafeas/grafeas_v1/proto/image.proto b/grafeas/grafeas/grafeas_v1/proto/image.proto
deleted file mode 100644
index 9ac162cec22f..000000000000
--- a/grafeas/grafeas/grafeas_v1/proto/image.proto
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2019 The Grafeas Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package grafeas.v1;
-
-option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas";
-option java_multiple_files = true;
-option java_package = "io.grafeas.v1";
-option objc_class_prefix = "GRA";
-
-// Layer holds metadata specific to a layer of a Docker image.
-message Layer {
-  // Required. The recovered Dockerfile directive used to construct this layer.
-  // See https://docs.docker.com/engine/reference/builder/ for more information.
-  string directive = 1;
-
-  // The recovered arguments to the Dockerfile directive.
-  string arguments = 2;
-}
-
-// A set of properties that uniquely identify a given Docker image.
-message Fingerprint {
-  // Required. The layer ID of the final layer in the Docker image's v1
-  // representation.
-  string v1_name = 1;
-
-  // Required. The ordered list of v2 blobs that represent a given image.
-  repeated string v2_blob = 2;
-
-  // Output only. The name of the image's v2 blobs computed via:
-  //   [bottom] := v2_blob[bottom]
-  //   [N] := sha256(v2_blob[N] + " " + v2_name[N+1])
-  // Only the name of the final blob is kept.
-  string v2_name = 3;
-}
-
-// Basis describes the base image portion (Note) of the DockerImage
-// relationship. Linked occurrences are derived from this or an equivalent image
-// via:
-//   FROM <Basis.resource_url>
-// Or an equivalent reference, e.g., a tag of the resource_url.
-message ImageNote {
-  // Required. Immutable. The resource_url for the resource representing the
-  // basis of associated occurrence images.
-  string resource_url = 1;
-
-  // Required. Immutable. The fingerprint of the base image.
-  Fingerprint fingerprint = 2;
-}
-
-// Details of the derived image portion of the DockerImage relationship. This
-// image would be produced from a Dockerfile with FROM <DockerImage.Basis in
-// attached Note>.
-message ImageOccurrence {
-  // Required.
The fingerprint of the derived image. - Fingerprint fingerprint = 1; - - // Output only. The number of layers by which this image differs from the - // associated image basis. - int32 distance = 2; - - // This contains layer-specific metadata, if populated it has length - // "distance" and is ordered with [distance] being the layer immediately - // following the base image and [1] being the final layer. - repeated Layer layer_info = 3; - - // Output only. This contains the base image URL for the derived image - // occurrence. - string base_resource_url = 4; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/image_pb2.py b/grafeas/grafeas/grafeas_v1/proto/image_pb2.py deleted file mode 100644 index 29648e78624b..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/image_pb2.py +++ /dev/null @@ -1,428 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas_v1/proto/image.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/image.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1cgrafeas_v1/proto/image.proto\x12\ngrafeas.v1"-\n\x05Layer\x12\x11\n\tdirective\x18\x01 \x01(\t\x12\x11\n\targuments\x18\x02 \x01(\t"@\n\x0b\x46ingerprint\x12\x0f\n\x07v1_name\x18\x01 \x01(\t\x12\x0f\n\x07v2_blob\x18\x02 \x03(\t\x12\x0f\n\x07v2_name\x18\x03 \x01(\t"O\n\tImageNote\x12\x14\n\x0cresource_url\x18\x01 \x01(\t\x12,\n\x0b\x66ingerprint\x18\x02 \x01(\x0b\x32\x17.grafeas.v1.Fingerprint"\x93\x01\n\x0fImageOccurrence\x12,\n\x0b\x66ingerprint\x18\x01 \x01(\x0b\x32\x17.grafeas.v1.Fingerprint\x12\x10\n\x08\x64istance\x18\x02 \x01(\x05\x12%\n\nlayer_info\x18\x03 \x03(\x0b\x32\x11.grafeas.v1.Layer\x12\x19\n\x11\x62\x61se_resource_url\x18\x04 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), -) - - -_LAYER = _descriptor.Descriptor( - name="Layer", - full_name="grafeas.v1.Layer", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="directive", - full_name="grafeas.v1.Layer.directive", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="arguments", - full_name="grafeas.v1.Layer.arguments", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=44, - serialized_end=89, -) - - -_FINGERPRINT = 
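image.proto, removed above, defines plain data messages, and the generated classes in image_pb2.py (whose deletion begins here) behave like any other protobuf types. A small sketch of the note/occurrence pair they model, with the import path assumed from this repo layout and all field values illustrative:

    # Assumed import path, mirroring the deleted file in this diff.
    from grafeas.grafeas_v1.proto import image_pb2

    # Note side: the fingerprint of a base image that other images build on.
    base = image_pb2.ImageNote(
        resource_url="https://gcr.io/example/base@sha256:deadbeef",
        fingerprint=image_pb2.Fingerprint(v1_name="deadbeef", v2_blob=["cafe", "f00d"]),
    )

    # Occurrence side: a derived image, two layers above its basis.
    derived = image_pb2.ImageOccurrence(
        fingerprint=image_pb2.Fingerprint(v1_name="beefcafe", v2_blob=["aaaa"]),
        distance=2,
        layer_info=[
            image_pb2.Layer(directive="COPY", arguments=". /app"),
            image_pb2.Layer(directive="RUN", arguments="make install"),
        ],
    )

    # Standard protobuf round trip.
    raw = derived.SerializeToString()
    assert image_pb2.ImageOccurrence.FromString(raw).distance == 2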
_descriptor.Descriptor( - name="Fingerprint", - full_name="grafeas.v1.Fingerprint", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="v1_name", - full_name="grafeas.v1.Fingerprint.v1_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="v2_blob", - full_name="grafeas.v1.Fingerprint.v2_blob", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="v2_name", - full_name="grafeas.v1.Fingerprint.v2_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=91, - serialized_end=155, -) - - -_IMAGENOTE = _descriptor.Descriptor( - name="ImageNote", - full_name="grafeas.v1.ImageNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_url", - full_name="grafeas.v1.ImageNote.resource_url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="grafeas.v1.ImageNote.fingerprint", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=157, - serialized_end=236, -) - - -_IMAGEOCCURRENCE = _descriptor.Descriptor( - name="ImageOccurrence", - full_name="grafeas.v1.ImageOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="grafeas.v1.ImageOccurrence.fingerprint", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distance", - full_name="grafeas.v1.ImageOccurrence.distance", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="layer_info", - full_name="grafeas.v1.ImageOccurrence.layer_info", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="base_resource_url", - full_name="grafeas.v1.ImageOccurrence.base_resource_url", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=239, - serialized_end=386, -) - -_IMAGENOTE.fields_by_name["fingerprint"].message_type = _FINGERPRINT -_IMAGEOCCURRENCE.fields_by_name["fingerprint"].message_type = _FINGERPRINT -_IMAGEOCCURRENCE.fields_by_name["layer_info"].message_type = _LAYER -DESCRIPTOR.message_types_by_name["Layer"] = _LAYER -DESCRIPTOR.message_types_by_name["Fingerprint"] = _FINGERPRINT -DESCRIPTOR.message_types_by_name["ImageNote"] = _IMAGENOTE -DESCRIPTOR.message_types_by_name["ImageOccurrence"] = _IMAGEOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Layer = _reflection.GeneratedProtocolMessageType( - "Layer", - (_message.Message,), - dict( - DESCRIPTOR=_LAYER, - __module__="grafeas_v1.proto.image_pb2", - __doc__="""Layer holds metadata specific to a layer of a Docker - image. - - - Attributes: - directive: - Required. The recovered Dockerfile directive used to construct - this layer. See - https://docs.docker.com/engine/reference/builder/ for more - information. - arguments: - The recovered arguments to the Dockerfile directive. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Layer) - ), -) -_sym_db.RegisterMessage(Layer) - -Fingerprint = _reflection.GeneratedProtocolMessageType( - "Fingerprint", - (_message.Message,), - dict( - DESCRIPTOR=_FINGERPRINT, - __module__="grafeas_v1.proto.image_pb2", - __doc__="""A set of properties that uniquely identify a given Docker - image. - - - Attributes: - v1_name: - Required. The layer ID of the final layer in the Docker - image's v1 representation. - v2_blob: - Required. The ordered list of v2 blobs that represent a given - image. - v2_name: - Output only. The name of the image's v2 blobs computed via: - [bottom] := v2\_blob[bottom] [N] := sha256(v2\_blob[N] + " " + - v2\_name[N+1]) Only the name of the final blob is kept. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Fingerprint) - ), -) -_sym_db.RegisterMessage(Fingerprint) - -ImageNote = _reflection.GeneratedProtocolMessageType( - "ImageNote", - (_message.Message,), - dict( - DESCRIPTOR=_IMAGENOTE, - __module__="grafeas_v1.proto.image_pb2", - __doc__="""Basis describes the base image portion (Note) of the - DockerImage relationship. Linked occurrences are derived from this or an - equivalent image via: FROM <Basis.resource\_url> Or an equivalent reference, e.g., a tag of - the resource\_url. - - - Attributes: - resource_url: - Required. Immutable. The resource\_url for the resource - representing the basis of associated occurrence images. - fingerprint: - Required. Immutable. The fingerprint of the base image.
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ImageNote) - ), -) -_sym_db.RegisterMessage(ImageNote) - -ImageOccurrence = _reflection.GeneratedProtocolMessageType( - "ImageOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_IMAGEOCCURRENCE, - __module__="grafeas_v1.proto.image_pb2", - __doc__="""Details of the derived image portion of the DockerImage - relationship. This image would be produced from a Dockerfile with FROM . - - - Attributes: - fingerprint: - Required. The fingerprint of the derived image. - distance: - Output only. The number of layers by which this image differs - from the associated image basis. - layer_info: - This contains layer-specific metadata, if populated it has - length "distance" and is ordered with [distance] being the - layer immediately following the base image and [1] being the - final layer. - base_resource_url: - Output only. This contains the base image URL for the derived - image occurrence. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ImageOccurrence) - ), -) -_sym_db.RegisterMessage(ImageOccurrence) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/package.proto b/grafeas/grafeas/grafeas_v1/proto/package.proto deleted file mode 100644 index b04686d9fc33..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/package.proto +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// Instruction set architectures supported by various package managers. -enum Architecture { - // Unknown architecture. - ARCHITECTURE_UNSPECIFIED = 0; - // X86 architecture. - X86 = 1; - // X64 architecture. - X64 = 2; -} - -// This represents a particular channel of distribution for a given package. -// E.g., Debian's jessie-backports dpkg mirror. -message Distribution { - // Required. The cpe_uri in [CPE format](https://cpe.mitre.org/specification/) - // denoting the package manager version distributing a package. - string cpe_uri = 1; - - // The CPU architecture for which packages in this distribution channel were - // built. - Architecture architecture = 2; - - // The latest available version of this package in this distribution channel. - Version latest_version = 3; - - // A freeform string denoting the maintainer of this package. 
- string maintainer = 4; - - // The distribution channel-specific homepage for this package. - string url = 5; - - // The distribution channel-specific description of this package. - string description = 6; -} - -// An occurrence of a particular package installation found within a system's -// filesystem. E.g., glibc was found in `/var/lib/dpkg/status`. -message Location { - // Required. The CPE URI in [CPE format](https://cpe.mitre.org/specification/) - // denoting the package manager version distributing a package. - string cpe_uri = 1; - - // The version installed at this location. - Version version = 2; - - // The path from which we gathered that this package/version is installed. - string path = 3; -} - -// This represents a particular package that is distributed over various -// channels. E.g., glibc (aka libc6) is distributed by many, at various -// versions. -message PackageNote { - // Required. Immutable. The name of the package. - string name = 1; - - // The various channels by which a package is distributed. - repeated Distribution distribution = 10; -} - -// Details on how a particular software package was installed on a system. -message PackageOccurrence { - // Output only. The name of the installed package. - string name = 1; - - // Required. All of the places within the filesystem versions of this package - // have been found. - repeated Location location = 2; -} - -// Version contains structured information about the version of a package. -message Version { - // Used to correct mistakes in the version numbering scheme. - int32 epoch = 1; - - // Required only when version kind is NORMAL. The main part of the version - // name. - string name = 2; - - // The iteration of the package build from the above version. - string revision = 3; - - // Whether this is an ordinary package version or a sentinel MIN/MAX version. - enum VersionKind { - // Unknown. - VERSION_KIND_UNSPECIFIED = 0; - // A standard package version. - NORMAL = 1; - // A special version representing negative infinity. - MINIMUM = 2; - // A special version representing positive infinity. - MAXIMUM = 3; - }; - - // Required. Distinguishes between sentinel MIN/MAX versions and normal - // versions. - VersionKind kind = 4; - - // Human readable version string. This string is of the form - // <epoch>:<name>-<revision> and is only set when kind is NORMAL. - string full_name = 5; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py b/grafeas/grafeas/grafeas_v1/proto/package_pb2.py deleted file mode 100644 index 3bdab00e710d..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py +++ /dev/null @@ -1,674 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT!
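To make the Version semantics above concrete: kind separates ordinary versions from the MIN/MAX sentinels used as open-ended range endpoints, and full_name follows the <epoch>:<name>-<revision> form and is only set for NORMAL versions. A sketch using the deleted package_pb2 module (import path assumed from this repo layout; values illustrative):

    from grafeas.grafeas_v1.proto import package_pb2  # assumed path

    # An ordinary Debian-style version: epoch 1, upstream 2.28, revision 10.
    glibc = package_pb2.Version(
        epoch=1,
        name="2.28",
        revision="10",
        kind=package_pb2.Version.NORMAL,
        full_name="1:2.28-10",  # <epoch>:<name>-<revision>
    )

    # A sentinel meaning "unbounded above"; no name or revision is set.
    upper_bound = package_pb2.Version(kind=package_pb2.Version.MAXIMUM)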
-# source: grafeas_v1/proto/package.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/package.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1egrafeas_v1/proto/package.proto\x12\ngrafeas.v1"\xb2\x01\n\x0c\x44istribution\x12\x0f\n\x07\x63pe_uri\x18\x01 \x01(\t\x12.\n\x0c\x61rchitecture\x18\x02 \x01(\x0e\x32\x18.grafeas.v1.Architecture\x12+\n\x0elatest_version\x18\x03 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x12\n\nmaintainer\x18\x04 \x01(\t\x12\x0b\n\x03url\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t"O\n\x08Location\x12\x0f\n\x07\x63pe_uri\x18\x01 \x01(\t\x12$\n\x07version\x18\x02 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x0c\n\x04path\x18\x03 \x01(\t"K\n\x0bPackageNote\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x0c\x64istribution\x18\n \x03(\x0b\x32\x18.grafeas.v1.Distribution"I\n\x11PackageOccurrence\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08location\x18\x02 \x03(\x0b\x32\x14.grafeas.v1.Location"\xcd\x01\n\x07Version\x12\r\n\x05\x65poch\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08revision\x18\x03 \x01(\t\x12-\n\x04kind\x18\x04 \x01(\x0e\x32\x1f.grafeas.v1.Version.VersionKind\x12\x11\n\tfull_name\x18\x05 \x01(\t"Q\n\x0bVersionKind\x12\x1c\n\x18VERSION_KIND_UNSPECIFIED\x10\x00\x12\n\n\x06NORMAL\x10\x01\x12\x0b\n\x07MINIMUM\x10\x02\x12\x0b\n\x07MAXIMUM\x10\x03*>\n\x0c\x41rchitecture\x12\x1c\n\x18\x41RCHITECTURE_UNSPECIFIED\x10\x00\x12\x07\n\x03X86\x10\x01\x12\x07\n\x03X64\x10\x02\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), -) - -_ARCHITECTURE = _descriptor.EnumDescriptor( - name="Architecture", - full_name="grafeas.v1.Architecture", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ARCHITECTURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="X86", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="X64", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=668, - serialized_end=730, -) -_sym_db.RegisterEnumDescriptor(_ARCHITECTURE) - -Architecture = enum_type_wrapper.EnumTypeWrapper(_ARCHITECTURE) -ARCHITECTURE_UNSPECIFIED = 0 -X86 = 1 -X64 = 2 - - -_VERSION_VERSIONKIND = _descriptor.EnumDescriptor( - name="VersionKind", - full_name="grafeas.v1.Version.VersionKind", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VERSION_KIND_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NORMAL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="MINIMUM", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - 
name="MAXIMUM", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=585, - serialized_end=666, -) -_sym_db.RegisterEnumDescriptor(_VERSION_VERSIONKIND) - - -_DISTRIBUTION = _descriptor.Descriptor( - name="Distribution", - full_name="grafeas.v1.Distribution", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cpe_uri", - full_name="grafeas.v1.Distribution.cpe_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="architecture", - full_name="grafeas.v1.Distribution.architecture", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="latest_version", - full_name="grafeas.v1.Distribution.latest_version", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="maintainer", - full_name="grafeas.v1.Distribution.maintainer", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="url", - full_name="grafeas.v1.Distribution.url", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="grafeas.v1.Distribution.description", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=47, - serialized_end=225, -) - - -_LOCATION = _descriptor.Descriptor( - name="Location", - full_name="grafeas.v1.Location", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cpe_uri", - full_name="grafeas.v1.Location.cpe_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="grafeas.v1.Location.version", - index=1, - 
number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="path", - full_name="grafeas.v1.Location.path", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=227, - serialized_end=306, -) - - -_PACKAGENOTE = _descriptor.Descriptor( - name="PackageNote", - full_name="grafeas.v1.PackageNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.PackageNote.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distribution", - full_name="grafeas.v1.PackageNote.distribution", - index=1, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=308, - serialized_end=383, -) - - -_PACKAGEOCCURRENCE = _descriptor.Descriptor( - name="PackageOccurrence", - full_name="grafeas.v1.PackageOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.PackageOccurrence.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location", - full_name="grafeas.v1.PackageOccurrence.location", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=385, - serialized_end=458, -) - - -_VERSION = _descriptor.Descriptor( - name="Version", - full_name="grafeas.v1.Version", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="epoch", - full_name="grafeas.v1.Version.epoch", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.Version.name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="revision", - full_name="grafeas.v1.Version.revision", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kind", - full_name="grafeas.v1.Version.kind", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="full_name", - full_name="grafeas.v1.Version.full_name", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_VERSION_VERSIONKIND,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=461, - serialized_end=666, -) - -_DISTRIBUTION.fields_by_name["architecture"].enum_type = _ARCHITECTURE -_DISTRIBUTION.fields_by_name["latest_version"].message_type = _VERSION -_LOCATION.fields_by_name["version"].message_type = _VERSION -_PACKAGENOTE.fields_by_name["distribution"].message_type = _DISTRIBUTION -_PACKAGEOCCURRENCE.fields_by_name["location"].message_type = _LOCATION -_VERSION.fields_by_name["kind"].enum_type = _VERSION_VERSIONKIND -_VERSION_VERSIONKIND.containing_type = _VERSION -DESCRIPTOR.message_types_by_name["Distribution"] = _DISTRIBUTION -DESCRIPTOR.message_types_by_name["Location"] = _LOCATION -DESCRIPTOR.message_types_by_name["PackageNote"] = _PACKAGENOTE -DESCRIPTOR.message_types_by_name["PackageOccurrence"] = _PACKAGEOCCURRENCE -DESCRIPTOR.message_types_by_name["Version"] = _VERSION -DESCRIPTOR.enum_types_by_name["Architecture"] = _ARCHITECTURE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Distribution = _reflection.GeneratedProtocolMessageType( - "Distribution", - (_message.Message,), - dict( - DESCRIPTOR=_DISTRIBUTION, - __module__="grafeas_v1.proto.package_pb2", - __doc__="""This represents a particular channel of distribution for a - given package. E.g., Debian's jessie-backports dpkg mirror. - - - Attributes: - cpe_uri: - Required. The cpe\_uri in `CPE format - <https://cpe.mitre.org/specification/>`__ denoting the package - manager version distributing a package. - architecture: - The CPU architecture for which packages in this distribution - channel were built. - latest_version: - The latest available version of this package in this - distribution channel. - maintainer: - A freeform string denoting the maintainer of this package. - url: - The distribution channel-specific homepage for this package.
- description: - The distribution channel-specific description of this package. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Distribution) - ), -) -_sym_db.RegisterMessage(Distribution) - -Location = _reflection.GeneratedProtocolMessageType( - "Location", - (_message.Message,), - dict( - DESCRIPTOR=_LOCATION, - __module__="grafeas_v1.proto.package_pb2", - __doc__="""An occurrence of a particular package installation found - within a system's filesystem. E.g., glibc was found in - ``/var/lib/dpkg/status``. - - - Attributes: - cpe_uri: - Required. The CPE URI in `CPE format - <https://cpe.mitre.org/specification/>`__ denoting the package - manager version distributing a package. - version: - The version installed at this location. - path: - The path from which we gathered that this package/version is - installed. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Location) - ), -) -_sym_db.RegisterMessage(Location) - -PackageNote = _reflection.GeneratedProtocolMessageType( - "PackageNote", - (_message.Message,), - dict( - DESCRIPTOR=_PACKAGENOTE, - __module__="grafeas_v1.proto.package_pb2", - __doc__="""This represents a particular package that is distributed - over various channels. E.g., glibc (aka libc6) is distributed by many, - at various versions. - - - Attributes: - name: - Required. Immutable. The name of the package. - distribution: - The various channels by which a package is distributed. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.PackageNote) - ), -) -_sym_db.RegisterMessage(PackageNote) - -PackageOccurrence = _reflection.GeneratedProtocolMessageType( - "PackageOccurrence", - (_message.Message,), - dict( - DESCRIPTOR=_PACKAGEOCCURRENCE, - __module__="grafeas_v1.proto.package_pb2", - __doc__="""Details on how a particular software package was installed - on a system. - - - Attributes: - name: - Output only. The name of the installed package. - location: - Required. All of the places within the filesystem versions of - this package have been found. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.PackageOccurrence) - ), -) -_sym_db.RegisterMessage(PackageOccurrence) - -Version = _reflection.GeneratedProtocolMessageType( - "Version", - (_message.Message,), - dict( - DESCRIPTOR=_VERSION, - __module__="grafeas_v1.proto.package_pb2", - __doc__="""Version contains structured information about the version - of a package. - - - Attributes: - epoch: - Used to correct mistakes in the version numbering scheme. - name: - Required only when version kind is NORMAL. The main part of - the version name. - revision: - The iteration of the package build from the above version. - kind: - Required. Distinguishes between sentinel MIN/MAX versions and - normal versions. - full_name: - Human readable version string. This string is of the form <epoch>:<name>-<revision> - and is only set when kind is NORMAL. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Version) - ), -) -_sym_db.RegisterMessage(Version) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
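Tying the generated package classes together: a PackageNote describes a package as distributed (one note per package), while a PackageOccurrence records where versions of that package were found on a scanned system. A sketch under the same assumed import path, with illustrative values:

    from grafeas.grafeas_v1.proto import package_pb2  # assumed path

    note = package_pb2.PackageNote(
        name="glibc",
        distribution=[
            package_pb2.Distribution(
                cpe_uri="cpe:/o:debian:debian_linux:10",
                architecture=package_pb2.X64,
                maintainer="Debian glibc maintainers",
            )
        ],
    )

    occurrence = package_pb2.PackageOccurrence(
        name="glibc",
        location=[
            package_pb2.Location(
                cpe_uri="cpe:/o:debian:debian_linux:10",
                path="/var/lib/dpkg/status",
            )
        ],
    )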
-import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/provenance.proto b/grafeas/grafeas/grafeas_v1/proto/provenance.proto deleted file mode 100644 index 06b109785f8a..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/provenance.proto +++ /dev/null @@ -1,265 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// Provenance of a build. Contains all information needed to verify the full -// details about the build from source to completion. -message BuildProvenance { - // Required. Unique identifier of the build. - string id = 1; - - // ID of the project. - string project_id = 2; - - // Commands requested by the build. - repeated Command commands = 3; - - // Output of the build. - repeated Artifact built_artifacts = 4; - - // Time at which the build was created. - google.protobuf.Timestamp create_time = 5; - - // Time at which execution of the build was started. - google.protobuf.Timestamp start_time = 6; - - // Time at which execution of the build was finished. - google.protobuf.Timestamp end_time = 7; - - // E-mail address of the user who initiated this build. Note that this was the - // user's e-mail address at the time the build was initiated; this address may - // not represent the same end-user for all time. - string creator = 8; - - // URI where any logs for this provenance were written. - string logs_uri = 9; - - // Details of the Source input to the build. - Source source_provenance = 10; - - // Trigger identifier if the build was triggered automatically; empty if not. - string trigger_id = 11; - - // Special options applied to this build. This is a catch-all field where - // build providers can enter any desired additional details. - map<string, string> build_options = 12; - - // Version string of the builder at the time this build was executed. - string builder_version = 13; -} - -// Source describes the location of the source used for the build. -message Source { - // If provided, the input binary artifacts for the build came from this - // location. - string artifact_storage_source_uri = 1; - - // Hash(es) of the build source, which can be used to verify that the original - // source integrity was maintained in the build. - // - - // The keys to this map are file paths used as build source and the values - // contain the hash values for those files. - // - - // If the build source came in a single package such as a gzipped tarfile - // (.tar.gz), the FileHash will be for the single path to that file. - map<string, FileHashes> file_hashes = 2; - - // If provided, the source code used for the build came from this location.
- SourceContext context = 3; - - // If provided, some of the source code used for the build may be found in - // these locations, in the case where the source repository had multiple - // remotes or submodules. This list will not include the context specified in - // the context field. - repeated SourceContext additional_contexts = 4; -} - -// Container message for hashes of byte content of files, used in source -// messages to verify integrity of source input to the build. -message FileHashes { - // Required. Collection of file hashes. - repeated Hash file_hash = 1; -} - -// Container message for hash values. -message Hash { - // Required. The type of hash that was performed, e.g. "SHA-256". - string type = 1; - // Required. The hash value. - bytes value = 2; -} - -// Command describes a step performed as part of the build pipeline. -message Command { - // Required. Name of the command, as presented on the command line, or if the - // command is packaged as a Docker container, as presented to `docker pull`. - string name = 1; - - // Environment variables set before running this command. - repeated string env = 2; - - // Command-line arguments used when executing this command. - repeated string args = 3; - - // Working directory (relative to project source root) used when running this - // command. - string dir = 4; - - // Optional unique identifier for this command, used in wait_for to reference - // this command as a dependency. - string id = 5; - - // The ID(s) of the command(s) that this command depends on. - repeated string wait_for = 6; -} - -// Artifact describes a build product. -message Artifact { - // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a - // container. - string checksum = 1; - - // Artifact ID, if any; for container images, this will be a URL by digest - // like `gcr.io/projectID/imagename@sha256:123456`. - string id = 2; - - // Related artifact names. This may be the path to a binary or jar file, or in - // the case of a container build, the name used to push the container image to - // Google Container Registry, as presented to `docker push`. Note that a - // single Artifact ID can have multiple names, for example if two tags are - // applied to one image. - repeated string names = 3; -} - -// A SourceContext is a reference to a tree of files. A SourceContext together -// with a path point to a unique revision of a single file or directory. -message SourceContext { - // A SourceContext can refer any one of the following types of repositories. - oneof context { - // A SourceContext referring to a revision in a Google Cloud Source Repo. - CloudRepoSourceContext cloud_repo = 1; - - // A SourceContext referring to a Gerrit project. - GerritSourceContext gerrit = 2; - - // A SourceContext referring to any third party Git repo (e.g., GitHub). - GitSourceContext git = 3; - } - - // Labels with user defined metadata. - map<string, string> labels = 4; -} - -// An alias to a repo revision. -message AliasContext { - // The type of an alias. - enum Kind { - // Unknown. - KIND_UNSPECIFIED = 0; - // Git tag. - FIXED = 1; - // Git branch. - MOVABLE = 2; - // Used to specify non-standard aliases. For example, if a Git repo has a - // ref named "refs/foo/bar". - OTHER = 4; - } - - // The alias kind. - Kind kind = 1; - - // The alias name. - string name = 2; -} - -// A CloudRepoSourceContext denotes a particular revision in a Google Cloud -// Source Repo. -message CloudRepoSourceContext { - // The ID of the repo.
- RepoId repo_id = 1; - - // A revision in a Cloud Repo can be identified by either its revision ID or - // its alias. - oneof revision { - // A revision ID. - string revision_id = 2; - - // An alias, which may be a branch or tag. - AliasContext alias_context = 3; - } -} - -// A SourceContext referring to a Gerrit project. -message GerritSourceContext { - // The URI of a running Gerrit instance. - string host_uri = 1; - - // The full project name within the host. Projects may be nested, so - // "project/subproject" is a valid project name. The "repo name" is the - // hostURI/project. - string gerrit_project = 2; - - // A revision in a Gerrit project can be identified by either its revision ID - // or its alias. - oneof revision { - // A revision (commit) ID. - string revision_id = 3; - - // An alias, which may be a branch or tag. - AliasContext alias_context = 4; - } -} - -// A GitSourceContext denotes a particular revision in a third party Git -// repository (e.g., GitHub). -message GitSourceContext { - // Git repository URL. - string url = 1; - - // Git commit hash. - string revision_id = 2; -} - -// A unique identifier for a Cloud Repo. -message RepoId { - // A cloud repo can be identified by either its project ID and repository name - // combination, or its globally unique identifier. - oneof id { - // A combination of a project ID and a repo name. - ProjectRepoId project_repo_id = 1; - - // A server-assigned, globally unique identifier. - string uid = 2; - } -} - -// Selects a repo using a Google Cloud Platform project ID (e.g., -// winged-cargo-31) and a repo name within that project. -message ProjectRepoId { - // The ID of the project. - string project_id = 1; - - // The name of the repo. Leave empty for the default repo. - string repo_name = 2; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py deleted file mode 100644 index 637ba1803e0d..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py +++ /dev/null @@ -1,1893 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
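Two protobuf features carry most of the provenance schema above: map fields (build_options, file_hashes, labels) and the google.protobuf.Timestamp well-known type. A sketch of populating them through the deleted provenance_pb2 module (import path assumed from this repo layout; values illustrative):

    from grafeas.grafeas_v1.proto import provenance_pb2  # assumed path

    bp = provenance_pb2.BuildProvenance(id="build-123", project_id="example-project")
    bp.commands.add(name="docker", args=["build", "-t", "app", "."])
    bp.built_artifacts.add(
        checksum="sha256:deadbeef",
        id="gcr.io/example-project/app@sha256:deadbeef",
    )

    # Map fields behave like dicts; no *Entry message is built by hand.
    bp.build_options["requestedVerifyOption"] = "VERIFIED"

    # Timestamp fields expose the well-known type's helper methods.
    bp.create_time.GetCurrentTime()

    # Message-valued maps insert a default entry on first access.
    src = provenance_pb2.Source(artifact_storage_source_uri="gs://bucket/src.tar.gz")
    src.file_hashes["src.tar.gz"].file_hash.add(type="SHA-256", value=b"\x01\x02")
    bp.source_provenance.CopyFrom(src)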
-# source: grafeas_v1/proto/provenance.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/provenance.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n!grafeas_v1/proto/provenance.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\x90\x04\n\x0f\x42uildProvenance\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12%\n\x08\x63ommands\x18\x03 \x03(\x0b\x32\x13.grafeas.v1.Command\x12-\n\x0f\x62uilt_artifacts\x18\x04 \x03(\x0b\x32\x14.grafeas.v1.Artifact\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x63reator\x18\x08 \x01(\t\x12\x10\n\x08logs_uri\x18\t \x01(\t\x12-\n\x11source_provenance\x18\n \x01(\x0b\x32\x12.grafeas.v1.Source\x12\x12\n\ntrigger_id\x18\x0b \x01(\t\x12\x44\n\rbuild_options\x18\x0c \x03(\x0b\x32-.grafeas.v1.BuildProvenance.BuildOptionsEntry\x12\x17\n\x0f\x62uilder_version\x18\r \x01(\t\x1a\x33\n\x11\x42uildOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x95\x02\n\x06Source\x12#\n\x1b\x61rtifact_storage_source_uri\x18\x01 \x01(\t\x12\x37\n\x0b\x66ile_hashes\x18\x02 \x03(\x0b\x32".grafeas.v1.Source.FileHashesEntry\x12*\n\x07\x63ontext\x18\x03 \x01(\x0b\x32\x19.grafeas.v1.SourceContext\x12\x36\n\x13\x61\x64\x64itional_contexts\x18\x04 \x03(\x0b\x32\x19.grafeas.v1.SourceContext\x1aI\n\x0f\x46ileHashesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.grafeas.v1.FileHashes:\x02\x38\x01"1\n\nFileHashes\x12#\n\tfile_hash\x18\x01 \x03(\x0b\x32\x10.grafeas.v1.Hash"#\n\x04Hash\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c"]\n\x07\x43ommand\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03\x65nv\x18\x02 \x03(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x10\n\x08wait_for\x18\x06 \x03(\t"7\n\x08\x41rtifact\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\r\n\x05names\x18\x03 \x03(\t"\x9a\x02\n\rSourceContext\x12\x38\n\ncloud_repo\x18\x01 \x01(\x0b\x32".grafeas.v1.CloudRepoSourceContextH\x00\x12\x31\n\x06gerrit\x18\x02 \x01(\x0b\x32\x1f.grafeas.v1.GerritSourceContextH\x00\x12+\n\x03git\x18\x03 \x01(\x0b\x32\x1c.grafeas.v1.GitSourceContextH\x00\x12\x35\n\x06labels\x18\x04 \x03(\x0b\x32%.grafeas.v1.SourceContext.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07\x63ontext"\x8a\x01\n\x0c\x41liasContext\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.grafeas.v1.AliasContext.Kind\x12\x0c\n\x04name\x18\x02 
\x01(\t"?\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x46IXED\x10\x01\x12\x0b\n\x07MOVABLE\x10\x02\x12\t\n\x05OTHER\x10\x04"\x93\x01\n\x16\x43loudRepoSourceContext\x12#\n\x07repo_id\x18\x01 \x01(\x0b\x32\x12.grafeas.v1.RepoId\x12\x15\n\x0brevision_id\x18\x02 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x03 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"\x95\x01\n\x13GerritSourceContext\x12\x10\n\x08host_uri\x18\x01 \x01(\t\x12\x16\n\x0egerrit_project\x18\x02 \x01(\t\x12\x15\n\x0brevision_id\x18\x03 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x04 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"4\n\x10GitSourceContext\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x13\n\x0brevision_id\x18\x02 \x01(\t"S\n\x06RepoId\x12\x34\n\x0fproject_repo_id\x18\x01 \x01(\x0b\x32\x19.grafeas.v1.ProjectRepoIdH\x00\x12\r\n\x03uid\x18\x02 \x01(\tH\x00\x42\x04\n\x02id"6\n\rProjectRepoId\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x11\n\trepo_name\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], -) - - -_ALIASCONTEXT_KIND = _descriptor.EnumDescriptor( - name="Kind", - full_name="grafeas.v1.AliasContext.Kind", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="KIND_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FIXED", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="MOVABLE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="OTHER", index=3, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1494, - serialized_end=1557, -) -_sym_db.RegisterEnumDescriptor(_ALIASCONTEXT_KIND) - - -_BUILDPROVENANCE_BUILDOPTIONSENTRY = _descriptor.Descriptor( - name="BuildOptionsEntry", - full_name="grafeas.v1.BuildProvenance.BuildOptionsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="grafeas.v1.BuildProvenance.BuildOptionsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="grafeas.v1.BuildProvenance.BuildOptionsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=560, - serialized_end=611, -) - -_BUILDPROVENANCE = _descriptor.Descriptor( - name="BuildProvenance", - full_name="grafeas.v1.BuildProvenance", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="grafeas.v1.BuildProvenance.id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, 
- default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="project_id", - full_name="grafeas.v1.BuildProvenance.project_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="commands", - full_name="grafeas.v1.BuildProvenance.commands", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="built_artifacts", - full_name="grafeas.v1.BuildProvenance.built_artifacts", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="grafeas.v1.BuildProvenance.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="grafeas.v1.BuildProvenance.start_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="grafeas.v1.BuildProvenance.end_time", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="creator", - full_name="grafeas.v1.BuildProvenance.creator", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="logs_uri", - full_name="grafeas.v1.BuildProvenance.logs_uri", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_provenance", - full_name="grafeas.v1.BuildProvenance.source_provenance", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trigger_id", - full_name="grafeas.v1.BuildProvenance.trigger_id", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="build_options", - full_name="grafeas.v1.BuildProvenance.build_options", - index=11, - number=12, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="builder_version", - full_name="grafeas.v1.BuildProvenance.builder_version", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_BUILDPROVENANCE_BUILDOPTIONSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=83, - serialized_end=611, -) - - -_SOURCE_FILEHASHESENTRY = _descriptor.Descriptor( - name="FileHashesEntry", - full_name="grafeas.v1.Source.FileHashesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="grafeas.v1.Source.FileHashesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="grafeas.v1.Source.FileHashesEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=818, - serialized_end=891, -) - -_SOURCE = _descriptor.Descriptor( - name="Source", - full_name="grafeas.v1.Source", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="artifact_storage_source_uri", - full_name="grafeas.v1.Source.artifact_storage_source_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_hashes", - full_name="grafeas.v1.Source.file_hashes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, 
- serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="grafeas.v1.Source.context", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="additional_contexts", - full_name="grafeas.v1.Source.additional_contexts", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SOURCE_FILEHASHESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=614, - serialized_end=891, -) - - -_FILEHASHES = _descriptor.Descriptor( - name="FileHashes", - full_name="grafeas.v1.FileHashes", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file_hash", - full_name="grafeas.v1.FileHashes.file_hash", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=893, - serialized_end=942, -) - - -_HASH = _descriptor.Descriptor( - name="Hash", - full_name="grafeas.v1.Hash", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="grafeas.v1.Hash.type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="grafeas.v1.Hash.value", - index=1, - number=2, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=944, - serialized_end=979, -) - - -_COMMAND = _descriptor.Descriptor( - name="Command", - full_name="grafeas.v1.Command", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.Command.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="env", - full_name="grafeas.v1.Command.env", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - 
has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="args", - full_name="grafeas.v1.Command.args", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dir", - full_name="grafeas.v1.Command.dir", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="id", - full_name="grafeas.v1.Command.id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="wait_for", - full_name="grafeas.v1.Command.wait_for", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=981, - serialized_end=1074, -) - - -_ARTIFACT = _descriptor.Descriptor( - name="Artifact", - full_name="grafeas.v1.Artifact", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="checksum", - full_name="grafeas.v1.Artifact.checksum", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="id", - full_name="grafeas.v1.Artifact.id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="names", - full_name="grafeas.v1.Artifact.names", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1076, - serialized_end=1131, -) - - -_SOURCECONTEXT_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="grafeas.v1.SourceContext.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="key", - full_name="grafeas.v1.SourceContext.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="grafeas.v1.SourceContext.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1360, - serialized_end=1405, -) - -_SOURCECONTEXT = _descriptor.Descriptor( - name="SourceContext", - full_name="grafeas.v1.SourceContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cloud_repo", - full_name="grafeas.v1.SourceContext.cloud_repo", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gerrit", - full_name="grafeas.v1.SourceContext.gerrit", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="git", - full_name="grafeas.v1.SourceContext.git", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="grafeas.v1.SourceContext.labels", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_SOURCECONTEXT_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="context", - full_name="grafeas.v1.SourceContext.context", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=1134, - serialized_end=1416, -) - - -_ALIASCONTEXT = _descriptor.Descriptor( - name="AliasContext", - full_name="grafeas.v1.AliasContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="kind", - full_name="grafeas.v1.AliasContext.kind", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
), - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.AliasContext.name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_ALIASCONTEXT_KIND,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1419, - serialized_end=1557, -) - - -_CLOUDREPOSOURCECONTEXT = _descriptor.Descriptor( - name="CloudRepoSourceContext", - full_name="grafeas.v1.CloudRepoSourceContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="repo_id", - full_name="grafeas.v1.CloudRepoSourceContext.repo_id", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="revision_id", - full_name="grafeas.v1.CloudRepoSourceContext.revision_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alias_context", - full_name="grafeas.v1.CloudRepoSourceContext.alias_context", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="revision", - full_name="grafeas.v1.CloudRepoSourceContext.revision", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=1560, - serialized_end=1707, -) - - -_GERRITSOURCECONTEXT = _descriptor.Descriptor( - name="GerritSourceContext", - full_name="grafeas.v1.GerritSourceContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="host_uri", - full_name="grafeas.v1.GerritSourceContext.host_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gerrit_project", - full_name="grafeas.v1.GerritSourceContext.gerrit_project", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="revision_id", - full_name="grafeas.v1.GerritSourceContext.revision_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="alias_context", - full_name="grafeas.v1.GerritSourceContext.alias_context", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="revision", - full_name="grafeas.v1.GerritSourceContext.revision", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=1710, - serialized_end=1859, -) - - -_GITSOURCECONTEXT = _descriptor.Descriptor( - name="GitSourceContext", - full_name="grafeas.v1.GitSourceContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="grafeas.v1.GitSourceContext.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="revision_id", - full_name="grafeas.v1.GitSourceContext.revision_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1861, - serialized_end=1913, -) - - -_REPOID = _descriptor.Descriptor( - name="RepoId", - full_name="grafeas.v1.RepoId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_repo_id", - full_name="grafeas.v1.RepoId.project_repo_id", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uid", - full_name="grafeas.v1.RepoId.uid", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="id", - full_name="grafeas.v1.RepoId.id", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=1915, - serialized_end=1998, -) - - -_PROJECTREPOID = _descriptor.Descriptor( - name="ProjectRepoId", - full_name="grafeas.v1.ProjectRepoId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - 
fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="grafeas.v1.ProjectRepoId.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="repo_name", - full_name="grafeas.v1.ProjectRepoId.repo_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2000, - serialized_end=2054, -) - -_BUILDPROVENANCE_BUILDOPTIONSENTRY.containing_type = _BUILDPROVENANCE -_BUILDPROVENANCE.fields_by_name["commands"].message_type = _COMMAND -_BUILDPROVENANCE.fields_by_name["built_artifacts"].message_type = _ARTIFACT -_BUILDPROVENANCE.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_BUILDPROVENANCE.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_BUILDPROVENANCE.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_BUILDPROVENANCE.fields_by_name["source_provenance"].message_type = _SOURCE -_BUILDPROVENANCE.fields_by_name[ - "build_options" -].message_type = _BUILDPROVENANCE_BUILDOPTIONSENTRY -_SOURCE_FILEHASHESENTRY.fields_by_name["value"].message_type = _FILEHASHES -_SOURCE_FILEHASHESENTRY.containing_type = _SOURCE -_SOURCE.fields_by_name["file_hashes"].message_type = _SOURCE_FILEHASHESENTRY -_SOURCE.fields_by_name["context"].message_type = _SOURCECONTEXT -_SOURCE.fields_by_name["additional_contexts"].message_type = _SOURCECONTEXT -_FILEHASHES.fields_by_name["file_hash"].message_type = _HASH -_SOURCECONTEXT_LABELSENTRY.containing_type = _SOURCECONTEXT -_SOURCECONTEXT.fields_by_name["cloud_repo"].message_type = _CLOUDREPOSOURCECONTEXT -_SOURCECONTEXT.fields_by_name["gerrit"].message_type = _GERRITSOURCECONTEXT -_SOURCECONTEXT.fields_by_name["git"].message_type = _GITSOURCECONTEXT -_SOURCECONTEXT.fields_by_name["labels"].message_type = _SOURCECONTEXT_LABELSENTRY -_SOURCECONTEXT.oneofs_by_name["context"].fields.append( - _SOURCECONTEXT.fields_by_name["cloud_repo"] -) -_SOURCECONTEXT.fields_by_name[ - "cloud_repo" -].containing_oneof = _SOURCECONTEXT.oneofs_by_name["context"] -_SOURCECONTEXT.oneofs_by_name["context"].fields.append( - _SOURCECONTEXT.fields_by_name["gerrit"] -) -_SOURCECONTEXT.fields_by_name[ - "gerrit" -].containing_oneof = _SOURCECONTEXT.oneofs_by_name["context"] -_SOURCECONTEXT.oneofs_by_name["context"].fields.append( - _SOURCECONTEXT.fields_by_name["git"] -) -_SOURCECONTEXT.fields_by_name["git"].containing_oneof = _SOURCECONTEXT.oneofs_by_name[ - "context" -] -_ALIASCONTEXT.fields_by_name["kind"].enum_type = _ALIASCONTEXT_KIND -_ALIASCONTEXT_KIND.containing_type = _ALIASCONTEXT -_CLOUDREPOSOURCECONTEXT.fields_by_name["repo_id"].message_type = _REPOID -_CLOUDREPOSOURCECONTEXT.fields_by_name["alias_context"].message_type = _ALIASCONTEXT -_CLOUDREPOSOURCECONTEXT.oneofs_by_name["revision"].fields.append( - 
_CLOUDREPOSOURCECONTEXT.fields_by_name["revision_id"] -) -_CLOUDREPOSOURCECONTEXT.fields_by_name[ - "revision_id" -].containing_oneof = _CLOUDREPOSOURCECONTEXT.oneofs_by_name["revision"] -_CLOUDREPOSOURCECONTEXT.oneofs_by_name["revision"].fields.append( - _CLOUDREPOSOURCECONTEXT.fields_by_name["alias_context"] -) -_CLOUDREPOSOURCECONTEXT.fields_by_name[ - "alias_context" -].containing_oneof = _CLOUDREPOSOURCECONTEXT.oneofs_by_name["revision"] -_GERRITSOURCECONTEXT.fields_by_name["alias_context"].message_type = _ALIASCONTEXT -_GERRITSOURCECONTEXT.oneofs_by_name["revision"].fields.append( - _GERRITSOURCECONTEXT.fields_by_name["revision_id"] -) -_GERRITSOURCECONTEXT.fields_by_name[ - "revision_id" -].containing_oneof = _GERRITSOURCECONTEXT.oneofs_by_name["revision"] -_GERRITSOURCECONTEXT.oneofs_by_name["revision"].fields.append( - _GERRITSOURCECONTEXT.fields_by_name["alias_context"] -) -_GERRITSOURCECONTEXT.fields_by_name[ - "alias_context" -].containing_oneof = _GERRITSOURCECONTEXT.oneofs_by_name["revision"] -_REPOID.fields_by_name["project_repo_id"].message_type = _PROJECTREPOID -_REPOID.oneofs_by_name["id"].fields.append(_REPOID.fields_by_name["project_repo_id"]) -_REPOID.fields_by_name["project_repo_id"].containing_oneof = _REPOID.oneofs_by_name[ - "id" -] -_REPOID.oneofs_by_name["id"].fields.append(_REPOID.fields_by_name["uid"]) -_REPOID.fields_by_name["uid"].containing_oneof = _REPOID.oneofs_by_name["id"] -DESCRIPTOR.message_types_by_name["BuildProvenance"] = _BUILDPROVENANCE -DESCRIPTOR.message_types_by_name["Source"] = _SOURCE -DESCRIPTOR.message_types_by_name["FileHashes"] = _FILEHASHES -DESCRIPTOR.message_types_by_name["Hash"] = _HASH -DESCRIPTOR.message_types_by_name["Command"] = _COMMAND -DESCRIPTOR.message_types_by_name["Artifact"] = _ARTIFACT -DESCRIPTOR.message_types_by_name["SourceContext"] = _SOURCECONTEXT -DESCRIPTOR.message_types_by_name["AliasContext"] = _ALIASCONTEXT -DESCRIPTOR.message_types_by_name["CloudRepoSourceContext"] = _CLOUDREPOSOURCECONTEXT -DESCRIPTOR.message_types_by_name["GerritSourceContext"] = _GERRITSOURCECONTEXT -DESCRIPTOR.message_types_by_name["GitSourceContext"] = _GITSOURCECONTEXT -DESCRIPTOR.message_types_by_name["RepoId"] = _REPOID -DESCRIPTOR.message_types_by_name["ProjectRepoId"] = _PROJECTREPOID -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -BuildProvenance = _reflection.GeneratedProtocolMessageType( - "BuildProvenance", - (_message.Message,), - dict( - BuildOptionsEntry=_reflection.GeneratedProtocolMessageType( - "BuildOptionsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_BUILDPROVENANCE_BUILDOPTIONSENTRY, - __module__="grafeas_v1.proto.provenance_pb2" - # @@protoc_insertion_point(class_scope:grafeas.v1.BuildProvenance.BuildOptionsEntry) - ), - ), - DESCRIPTOR=_BUILDPROVENANCE, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Provenance of a build. Contains all information needed to - verify the full details about the build from source to completion. - - - Attributes: - id: - Required. Unique identifier of the build. - project_id: - ID of the project. - commands: - Commands requested by the build. - built_artifacts: - Output of the build. - create_time: - Time at which the build was created. - start_time: - Time at which execution of the build was started. - end_time: - Time at which execution of the build was finished. - creator: - E-mail address of the user who initiated this build. 
Note that - this was the user's e-mail address at the time the build was - initiated; this address may not represent the same end-user - for all time. - logs_uri: - URI where any logs for this provenance were written. - source_provenance: - Details of the Source input to the build. - trigger_id: - Trigger identifier if the build was triggered automatically; - empty if not. - build_options: - Special options applied to this build. This is a catch-all - field where build providers can enter any desired additional - details. - builder_version: - Version string of the builder at the time this build was - executed. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.BuildProvenance) - ), -) -_sym_db.RegisterMessage(BuildProvenance) -_sym_db.RegisterMessage(BuildProvenance.BuildOptionsEntry) - -Source = _reflection.GeneratedProtocolMessageType( - "Source", - (_message.Message,), - dict( - FileHashesEntry=_reflection.GeneratedProtocolMessageType( - "FileHashesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SOURCE_FILEHASHESENTRY, - __module__="grafeas_v1.proto.provenance_pb2" - # @@protoc_insertion_point(class_scope:grafeas.v1.Source.FileHashesEntry) - ), - ), - DESCRIPTOR=_SOURCE, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Source describes the location of the source used for the - build. - - - Attributes: - artifact_storage_source_uri: - If provided, the input binary artifacts for the build came - from this location. - file_hashes: - Hash(es) of the build source, which can be used to verify that - the original source integrity was maintained in the build. - The keys to this map are file paths used as build source and - the values contain the hash values for those files. If the - build source came in a single package such as a gzipped - tarfile (.tar.gz), the FileHash will be for the single path to - that file. - context: - If provided, the source code used for the build came from this - location. - additional_contexts: - If provided, some of the source code used for the build may be - found in these locations, in the case where the source - repository had multiple remotes or submodules. This list will - not include the context specified in the context field. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Source) - ), -) -_sym_db.RegisterMessage(Source) -_sym_db.RegisterMessage(Source.FileHashesEntry) - -FileHashes = _reflection.GeneratedProtocolMessageType( - "FileHashes", - (_message.Message,), - dict( - DESCRIPTOR=_FILEHASHES, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Container message for hashes of byte content of files, - used in source messages to verify integrity of source input to the - build. - - - Attributes: - file_hash: - Required. Collection of file hashes. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.FileHashes) - ), -) -_sym_db.RegisterMessage(FileHashes) - -Hash = _reflection.GeneratedProtocolMessageType( - "Hash", - (_message.Message,), - dict( - DESCRIPTOR=_HASH, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Container message for hash values. - - - Attributes: - type: - Required. The type of hash that was performed, e.g. "SHA-256". - value: - Required. The hash value. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Hash) - ), -) -_sym_db.RegisterMessage(Hash) - -Command = _reflection.GeneratedProtocolMessageType( - "Command", - (_message.Message,), - dict( - DESCRIPTOR=_COMMAND, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Command describes a step performed as part of the build - pipeline. - - - Attributes: - name: - Required. Name of the command, as presented on the command - line, or if the command is packaged as a Docker container, as - presented to ``docker pull``. - env: - Environment variables set before running this command. - args: - Command-line arguments used when executing this command. - dir: - Working directory (relative to project source root) used when - running this command. - id: - Optional unique identifier for this command, used in wait\_for - to reference this command as a dependency. - wait_for: - The ID(s) of the command(s) that this command depends on. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Command) - ), -) -_sym_db.RegisterMessage(Command) - -Artifact = _reflection.GeneratedProtocolMessageType( - "Artifact", - (_message.Message,), - dict( - DESCRIPTOR=_ARTIFACT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Artifact describes a build product. - - - Attributes: - checksum: - Hash or checksum value of a binary, or Docker Registry 2.0 - digest of a container. - id: - Artifact ID, if any; for container images, this will be a URL - by digest like ``gcr.io/projectID/imagename@sha256:123456``. - names: - Related artifact names. This may be the path to a binary or - jar file, or in the case of a container build, the name used - to push the container image to Google Container Registry, as - presented to ``docker push``. Note that a single Artifact ID - can have multiple names, for example if two tags are applied - to one image. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Artifact) - ), -) -_sym_db.RegisterMessage(Artifact) - -SourceContext = _reflection.GeneratedProtocolMessageType( - "SourceContext", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_SOURCECONTEXT_LABELSENTRY, - __module__="grafeas_v1.proto.provenance_pb2" - # @@protoc_insertion_point(class_scope:grafeas.v1.SourceContext.LabelsEntry) - ), - ), - DESCRIPTOR=_SOURCECONTEXT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""A SourceContext is a reference to a tree of files. A - SourceContext together with a path point to a unique revision of a - single file or directory. - - - Attributes: - context: - A SourceContext can refer any one of the following types of - repositories. - cloud_repo: - A SourceContext referring to a revision in a Google Cloud - Source Repo. - gerrit: - A SourceContext referring to a Gerrit project. - git: - A SourceContext referring to any third party Git repo (e.g., - GitHub). - labels: - Labels with user defined metadata. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.SourceContext) - ), -) -_sym_db.RegisterMessage(SourceContext) -_sym_db.RegisterMessage(SourceContext.LabelsEntry) - -AliasContext = _reflection.GeneratedProtocolMessageType( - "AliasContext", - (_message.Message,), - dict( - DESCRIPTOR=_ALIASCONTEXT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""An alias to a repo revision. - - - Attributes: - kind: - The alias kind. - name: - The alias name. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.AliasContext) - ), -) -_sym_db.RegisterMessage(AliasContext) - -CloudRepoSourceContext = _reflection.GeneratedProtocolMessageType( - "CloudRepoSourceContext", - (_message.Message,), - dict( - DESCRIPTOR=_CLOUDREPOSOURCECONTEXT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""A CloudRepoSourceContext denotes a particular revision in - a Google Cloud Source Repo. - - - Attributes: - repo_id: - The ID of the repo. - revision: - A revision in a Cloud Repo can be identified by either its - revision ID or its alias. - revision_id: - A revision ID. - alias_context: - An alias, which may be a branch or tag. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.CloudRepoSourceContext) - ), -) -_sym_db.RegisterMessage(CloudRepoSourceContext) - -GerritSourceContext = _reflection.GeneratedProtocolMessageType( - "GerritSourceContext", - (_message.Message,), - dict( - DESCRIPTOR=_GERRITSOURCECONTEXT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""A SourceContext referring to a Gerrit project. - - - Attributes: - host_uri: - The URI of a running Gerrit instance. - gerrit_project: - The full project name within the host. Projects may be nested, - so "project/subproject" is a valid project name. The "repo - name" is the hostURI/project. - revision: - A revision in a Gerrit project can be identified by either its - revision ID or its alias. - revision_id: - A revision (commit) ID. - alias_context: - An alias, which may be a branch or tag. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.GerritSourceContext) - ), -) -_sym_db.RegisterMessage(GerritSourceContext) - -GitSourceContext = _reflection.GeneratedProtocolMessageType( - "GitSourceContext", - (_message.Message,), - dict( - DESCRIPTOR=_GITSOURCECONTEXT, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""A GitSourceContext denotes a particular revision in a - third party Git repository (e.g., GitHub). - - - Attributes: - url: - Git repository URL. - revision_id: - Git commit hash. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.GitSourceContext) - ), -) -_sym_db.RegisterMessage(GitSourceContext) - -RepoId = _reflection.GeneratedProtocolMessageType( - "RepoId", - (_message.Message,), - dict( - DESCRIPTOR=_REPOID, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""A unique identifier for a Cloud Repo. - - - Attributes: - id: - A cloud repo can be identified by either its project ID and - repository name combination, or its globally unique - identifier. - project_repo_id: - A combination of a project ID and a repo name. - uid: - A server-assigned, globally unique identifier. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.RepoId) - ), -) -_sym_db.RegisterMessage(RepoId) - -ProjectRepoId = _reflection.GeneratedProtocolMessageType( - "ProjectRepoId", - (_message.Message,), - dict( - DESCRIPTOR=_PROJECTREPOID, - __module__="grafeas_v1.proto.provenance_pb2", - __doc__="""Selects a repo using a Google Cloud Platform project ID - (e.g., winged-cargo-31) and a repo name within that project. - - - Attributes: - project_id: - The ID of the project. - repo_name: - The name of the repo. Leave empty for the default repo. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ProjectRepoId) - ), -) -_sym_db.RegisterMessage(ProjectRepoId) - - -DESCRIPTOR._options = None -_BUILDPROVENANCE_BUILDOPTIONSENTRY._options = None -_SOURCE_FILEHASHESENTRY._options = None -_SOURCECONTEXT_LABELSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/upgrade.proto b/grafeas/grafeas/grafeas_v1/proto/upgrade.proto deleted file mode 100644 index 89d389299199..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/upgrade.proto +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/protobuf/timestamp.proto"; -import "grafeas/v1/package.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// An Upgrade Note represents a potential upgrade of a package to a given -// version. For each package version combination (i.e. bash 4.0, bash 4.1, -// bash 4.1.2), there will be an Upgrade Note. For Windows, windows_update field -// represents the information related to the update. -message UpgradeNote { - // Required for non-Windows OS. The package this Upgrade is for. - string package = 1; - // Required for non-Windows OS. The version of the package in machine + human - // readable form. - grafeas.v1.Version version = 2; - // Metadata about the upgrade for each specific operating system. - repeated UpgradeDistribution distributions = 3; - // Required for Windows OS. Represents the metadata about the Windows update. - WindowsUpdate windows_update = 4; -} - -// The Upgrade Distribution represents metadata about the Upgrade for each -// operating system (CPE). Some distributions have additional metadata around -// updates, classifying them into various categories and severities. -message UpgradeDistribution { - // Required - The specific operating system this metadata applies to. See - // https://cpe.mitre.org/specification/. - string cpe_uri = 1; - // The operating system classification of this Upgrade, as specified by the - // upstream operating system upgrade feed. For Windows the classification is - // one of the category_ids listed at - // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/ff357803(v=vs.85) - string classification = 2; - // The severity as specified by the upstream operating system. - string severity = 3; - // The cve tied to this Upgrade. 
- repeated string cve = 4; -} - -// Windows Update represents the metadata about the update for the Windows -// operating system. The fields in this message come from the Windows Update API -// documented at -// https://docs.microsoft.com/en-us/windows/win32/api/wuapi/nn-wuapi-iupdate. -message WindowsUpdate { - // The unique identifier of the update. - message Identity { - // The revision independent identifier of the update. - string update_id = 1; - // The revision number of the update. - int32 revision = 2; - } - // Required - The unique identifier for the update. - Identity identity = 1; - // The localized title of the update. - string title = 2; - // The localized description of the update. - string description = 3; - // The category to which the update belongs. - message Category { - // The identifier of the category. - string category_id = 1; - // The localized name of the category. - string name = 2; - } - // The list of categories to which the update belongs. - repeated Category categories = 4; - // The Microsoft Knowledge Base article IDs that are associated with the - // update. - repeated string kb_article_ids = 5; - // The hyperlink to the support information for the update. - string support_url = 6; - // The last published timestamp of the update. - google.protobuf.Timestamp last_published_timestamp = 7; -} - -// An Upgrade Occurrence represents that a specific resource_url could install a -// specific upgrade. This presence is supplied via local sources (i.e. it is -// present in the mirror and the running system has noticed its availability). -// For Windows, both distribution and windows_update contain information for the -// Windows update. -message UpgradeOccurrence { - // Required for non-Windows OS. The package this Upgrade is for. - string package = 1; - // Required for non-Windows OS. The version of the package in a machine + - // human readable form. - grafeas.v1.Version parsed_version = 3; - // Metadata about the upgrade for available for the specific operating system - // for the resource_url. This allows efficient filtering, as well as - // making it easier to use the occurrence. - UpgradeDistribution distribution = 4; - // Required for Windows OS. Represents the metadata about the Windows update. - WindowsUpdate windows_update = 5; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2.py b/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2.py deleted file mode 100644 index a8f4cdaa31ee..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2.py +++ /dev/null @@ -1,770 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: grafeas_v1/proto/upgrade.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from grafeas.grafeas_v1.proto import ( - package_pb2 as grafeas__v1_dot_proto_dot_package__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/upgrade.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n\x1egrafeas_v1/proto/upgrade.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egrafeas_v1/proto/package.proto"\xaf\x01\n\x0bUpgradeNote\x12\x0f\n\x07package\x18\x01 \x01(\t\x12$\n\x07version\x18\x02 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x36\n\rdistributions\x18\x03 \x03(\x0b\x32\x1f.grafeas.v1.UpgradeDistribution\x12\x31\n\x0ewindows_update\x18\x04 \x01(\x0b\x32\x19.grafeas.v1.WindowsUpdate"]\n\x13UpgradeDistribution\x12\x0f\n\x07\x63pe_uri\x18\x01 \x01(\t\x12\x16\n\x0e\x63lassification\x18\x02 \x01(\t\x12\x10\n\x08severity\x18\x03 \x01(\t\x12\x0b\n\x03\x63ve\x18\x04 \x03(\t"\xec\x02\n\rWindowsUpdate\x12\x34\n\x08identity\x18\x01 \x01(\x0b\x32".grafeas.v1.WindowsUpdate.Identity\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x36\n\ncategories\x18\x04 \x03(\x0b\x32".grafeas.v1.WindowsUpdate.Category\x12\x16\n\x0ekb_article_ids\x18\x05 \x03(\t\x12\x13\n\x0bsupport_url\x18\x06 \x01(\t\x12<\n\x18last_published_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a/\n\x08Identity\x12\x11\n\tupdate_id\x18\x01 \x01(\t\x12\x10\n\x08revision\x18\x02 \x01(\x05\x1a-\n\x08\x43\x61tegory\x12\x13\n\x0b\x63\x61tegory_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t"\xbb\x01\n\x11UpgradeOccurrence\x12\x0f\n\x07package\x18\x01 \x01(\t\x12+\n\x0eparsed_version\x18\x03 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x35\n\x0c\x64istribution\x18\x04 \x01(\x0b\x32\x1f.grafeas.v1.UpgradeDistribution\x12\x31\n\x0ewindows_update\x18\x05 \x01(\x0b\x32\x19.grafeas.v1.WindowsUpdateBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_package__pb2.DESCRIPTOR, - ], -) - - -_UPGRADENOTE = _descriptor.Descriptor( - name="UpgradeNote", - full_name="grafeas.v1.UpgradeNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="package", - full_name="grafeas.v1.UpgradeNote.package", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="grafeas.v1.UpgradeNote.version", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distributions", - full_name="grafeas.v1.UpgradeNote.distributions", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="windows_update", - full_name="grafeas.v1.UpgradeNote.windows_update", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=112, - serialized_end=287, -) - - -_UPGRADEDISTRIBUTION = _descriptor.Descriptor( - name="UpgradeDistribution", - full_name="grafeas.v1.UpgradeDistribution", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cpe_uri", - full_name="grafeas.v1.UpgradeDistribution.cpe_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="classification", - full_name="grafeas.v1.UpgradeDistribution.classification", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="grafeas.v1.UpgradeDistribution.severity", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cve", - full_name="grafeas.v1.UpgradeDistribution.cve", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=289, - serialized_end=382, -) - - -_WINDOWSUPDATE_IDENTITY = _descriptor.Descriptor( - name="Identity", - full_name="grafeas.v1.WindowsUpdate.Identity", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="update_id", - full_name="grafeas.v1.WindowsUpdate.Identity.update_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="revision", - full_name="grafeas.v1.WindowsUpdate.Identity.revision", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=655, - serialized_end=702, -) - -_WINDOWSUPDATE_CATEGORY = _descriptor.Descriptor( - name="Category", - full_name="grafeas.v1.WindowsUpdate.Category", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="category_id", - full_name="grafeas.v1.WindowsUpdate.Category.category_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.WindowsUpdate.Category.name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=704, - serialized_end=749, -) - -_WINDOWSUPDATE = _descriptor.Descriptor( - name="WindowsUpdate", - full_name="grafeas.v1.WindowsUpdate", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="identity", - full_name="grafeas.v1.WindowsUpdate.identity", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="title", - full_name="grafeas.v1.WindowsUpdate.title", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="grafeas.v1.WindowsUpdate.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="categories", - full_name="grafeas.v1.WindowsUpdate.categories", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kb_article_ids", - 
full_name="grafeas.v1.WindowsUpdate.kb_article_ids", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="support_url", - full_name="grafeas.v1.WindowsUpdate.support_url", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last_published_timestamp", - full_name="grafeas.v1.WindowsUpdate.last_published_timestamp", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WINDOWSUPDATE_IDENTITY, _WINDOWSUPDATE_CATEGORY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=385, - serialized_end=749, -) - - -_UPGRADEOCCURRENCE = _descriptor.Descriptor( - name="UpgradeOccurrence", - full_name="grafeas.v1.UpgradeOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="package", - full_name="grafeas.v1.UpgradeOccurrence.package", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="parsed_version", - full_name="grafeas.v1.UpgradeOccurrence.parsed_version", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="distribution", - full_name="grafeas.v1.UpgradeOccurrence.distribution", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="windows_update", - full_name="grafeas.v1.UpgradeOccurrence.windows_update", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=752, - serialized_end=939, -) - -_UPGRADENOTE.fields_by_name[ - "version" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION -_UPGRADENOTE.fields_by_name["distributions"].message_type = _UPGRADEDISTRIBUTION 
-_UPGRADENOTE.fields_by_name["windows_update"].message_type = _WINDOWSUPDATE -_WINDOWSUPDATE_IDENTITY.containing_type = _WINDOWSUPDATE -_WINDOWSUPDATE_CATEGORY.containing_type = _WINDOWSUPDATE -_WINDOWSUPDATE.fields_by_name["identity"].message_type = _WINDOWSUPDATE_IDENTITY -_WINDOWSUPDATE.fields_by_name["categories"].message_type = _WINDOWSUPDATE_CATEGORY -_WINDOWSUPDATE.fields_by_name[ - "last_published_timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UPGRADEOCCURRENCE.fields_by_name[ - "parsed_version" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION -_UPGRADEOCCURRENCE.fields_by_name["distribution"].message_type = _UPGRADEDISTRIBUTION -_UPGRADEOCCURRENCE.fields_by_name["windows_update"].message_type = _WINDOWSUPDATE -DESCRIPTOR.message_types_by_name["UpgradeNote"] = _UPGRADENOTE -DESCRIPTOR.message_types_by_name["UpgradeDistribution"] = _UPGRADEDISTRIBUTION -DESCRIPTOR.message_types_by_name["WindowsUpdate"] = _WINDOWSUPDATE -DESCRIPTOR.message_types_by_name["UpgradeOccurrence"] = _UPGRADEOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -UpgradeNote = _reflection.GeneratedProtocolMessageType( - "UpgradeNote", - (_message.Message,), - dict( - DESCRIPTOR=_UPGRADENOTE, - __module__="grafeas_v1.proto.upgrade_pb2", - __doc__="""An Upgrade Note represents a potential upgrade of a - package to a given version. For each package version combination (i.e. - bash 4.0, bash 4.1, bash 4.1.2), there will be an Upgrade Note. For - Windows, windows\_update field represents the information related to the - update. - - - Attributes: - package: - Required for non-Windows OS. The package this Upgrade is for. - version: - Required for non-Windows OS. The version of the package in - machine + human readable form. - distributions: - Metadata about the upgrade for each specific operating system. - windows_update: - Required for Windows OS. Represents the metadata about the - Windows update. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.UpgradeNote) - ), -) -_sym_db.RegisterMessage(UpgradeNote) - -UpgradeDistribution = _reflection.GeneratedProtocolMessageType( - "UpgradeDistribution", - (_message.Message,), - dict( - DESCRIPTOR=_UPGRADEDISTRIBUTION, - __module__="grafeas_v1.proto.upgrade_pb2", - __doc__="""The Upgrade Distribution represents metadata about the - Upgrade for each operating system (CPE). Some distributions have - additional metadata around updates, classifying them into various - categories and severities. - - - Attributes: - cpe_uri: - Required - The specific operating system this metadata applies - to. See https://cpe.mitre.org/specification/. - classification: - The operating system classification of this Upgrade, as - specified by the upstream operating system upgrade feed. For - Windows the classification is one of the category\_ids listed - at https://docs.microsoft.com/en-us/previous- - versions/windows/desktop/ff357803(v=vs.85) - severity: - The severity as specified by the upstream operating system. - cve: - The cve tied to this Upgrade. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.UpgradeDistribution) - ), -) -_sym_db.RegisterMessage(UpgradeDistribution) - -WindowsUpdate = _reflection.GeneratedProtocolMessageType( - "WindowsUpdate", - (_message.Message,), - dict( - Identity=_reflection.GeneratedProtocolMessageType( - "Identity", - (_message.Message,), - dict( - DESCRIPTOR=_WINDOWSUPDATE_IDENTITY, - __module__="grafeas_v1.proto.upgrade_pb2", - __doc__="""The unique identifier of the update. 
-
-
-  Attributes:
-      update_id:
-          The revision independent identifier of the update.
-      revision:
-          The revision number of the update.
-  """,
-                # @@protoc_insertion_point(class_scope:grafeas.v1.WindowsUpdate.Identity)
-            ),
-        ),
-        Category=_reflection.GeneratedProtocolMessageType(
-            "Category",
-            (_message.Message,),
-            dict(
-                DESCRIPTOR=_WINDOWSUPDATE_CATEGORY,
-                __module__="grafeas_v1.proto.upgrade_pb2",
-                __doc__="""The category to which the update belongs.
-
-
-  Attributes:
-      category_id:
-          The identifier of the category.
-      name:
-          The localized name of the category.
-  """,
-                # @@protoc_insertion_point(class_scope:grafeas.v1.WindowsUpdate.Category)
-            ),
-        ),
-        DESCRIPTOR=_WINDOWSUPDATE,
-        __module__="grafeas_v1.proto.upgrade_pb2",
-        __doc__="""Windows Update represents the metadata about the update
-  for the Windows operating system. The fields in this message come from
-  the Windows Update API documented at
-  https://docs.microsoft.com/en-us/windows/win32/api/wuapi/nn-wuapi-iupdate.
-
-
-  Attributes:
-      identity:
-          Required - The unique identifier for the update.
-      title:
-          The localized title of the update.
-      description:
-          The localized description of the update.
-      categories:
-          The list of categories to which the update belongs.
-      kb_article_ids:
-          The Microsoft Knowledge Base article IDs that are associated
-          with the update.
-      support_url:
-          The hyperlink to the support information for the update.
-      last_published_timestamp:
-          The last published timestamp of the update.
-  """,
-        # @@protoc_insertion_point(class_scope:grafeas.v1.WindowsUpdate)
-    ),
-)
-_sym_db.RegisterMessage(WindowsUpdate)
-_sym_db.RegisterMessage(WindowsUpdate.Identity)
-_sym_db.RegisterMessage(WindowsUpdate.Category)
-
-UpgradeOccurrence = _reflection.GeneratedProtocolMessageType(
-    "UpgradeOccurrence",
-    (_message.Message,),
-    dict(
-        DESCRIPTOR=_UPGRADEOCCURRENCE,
-        __module__="grafeas_v1.proto.upgrade_pb2",
-        __doc__="""An Upgrade Occurrence represents that a specific
-  resource\_url could install a specific upgrade. This presence is
-  supplied via local sources (i.e. it is present in the mirror and the
-  running system has noticed its availability). For Windows, both
-  distribution and windows\_update contain information for the Windows
-  update.
-
-
-  Attributes:
-      package:
-          Required for non-Windows OS. The package this Upgrade is for.
-      parsed_version:
-          Required for non-Windows OS. The version of the package in a
-          machine + human readable form.
-      distribution:
-          Metadata about the upgrade available for the specific
-          operating system for the resource\_url. This allows efficient
-          filtering, as well as making it easier to use the occurrence.
-      windows_update:
-          Required for Windows OS. Represents the metadata about the
-          Windows update.
-  """,
-        # @@protoc_insertion_point(class_scope:grafeas.v1.UpgradeOccurrence)
-    ),
-)
-_sym_db.RegisterMessage(UpgradeOccurrence)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03a9..000000000000
--- a/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
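For context, a minimal usage sketch of the generated upgrade types removed above (hypothetical: it assumes a pre-removal install of this grafeas package, and the identifier values are invented for illustration):

    from grafeas.grafeas_v1.proto import upgrade_pb2

    # Identity and Category are the nested types registered above.
    update = upgrade_pb2.WindowsUpdate(
        identity=upgrade_pb2.WindowsUpdate.Identity(
            update_id="example-update-id",  # invented value
            revision=1,
        ),
        title="Example security update",
        kb_article_ids=["KB123456"],  # repeated string field
    )

    # For Windows, an UpgradeOccurrence carries the update in windows_update;
    # package/parsed_version are the non-Windows fields.
    occurrence = upgrade_pb2.UpgradeOccurrence(windows_update=update)
    print(occurrence)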
-import grpc diff --git a/grafeas/grafeas/grafeas_v1/proto/vulnerability.proto b/grafeas/grafeas/grafeas_v1/proto/vulnerability.proto deleted file mode 100644 index 6c94cdf05025..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/vulnerability.proto +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2019 The Grafeas Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package grafeas.v1; - -import "google/protobuf/timestamp.proto"; -import "grafeas/v1/common.proto"; -import "grafeas/v1/cvss.proto"; -import "grafeas/v1/package.proto"; - -option go_package = "google.golang.org/genproto/googleapis/grafeas/v1;grafeas"; -option java_multiple_files = true; -option java_package = "io.grafeas.v1"; -option objc_class_prefix = "GRA"; - -// Note provider assigned severity/impact ranking. -enum Severity { - // Unknown. - SEVERITY_UNSPECIFIED = 0; - // Minimal severity. - MINIMAL = 1; - // Low severity. - LOW = 2; - // Medium severity. - MEDIUM = 3; - // High severity. - HIGH = 4; - // Critical severity. - CRITICAL = 5; -} - -// A security vulnerability that can be found in resources. -message VulnerabilityNote { - // The CVSS score of this vulnerability. CVSS score is on a scale of 0 - 10 - // where 0 indicates low severity and 10 indicates high severity. - float cvss_score = 1; - - // The note provider assigned severity of this vulnerability. - Severity severity = 2; - - // Details of all known distros and packages affected by this vulnerability. - repeated Detail details = 3; - - // A detail for a distro and package affected by this vulnerability and its - // associated fix (if one is available). - message Detail { - // The distro assigned severity of this vulnerability. - string severity_name = 1; - - // A vendor-specific description of this vulnerability. - string description = 2; - - // The type of package; whether native or non native (e.g., ruby gems, - // node.js packages, etc.). - string package_type = 3; - - // Required. The [CPE URI](https://cpe.mitre.org/specification/) this - // vulnerability affects. - string affected_cpe_uri = 4; - - // Required. The package this vulnerability affects. - string affected_package = 5; - - // The version number at the start of an interval in which this - // vulnerability exists. A vulnerability can affect a package between - // version numbers that are disjoint sets of intervals (example: - // [1.0.0-1.1.0], [2.4.6-2.4.8] and [4.5.6-4.6.8]) each of which will be - // represented in its own Detail. If a specific affected version is provided - // by a vulnerability database, affected_version_start and - // affected_version_end will be the same in that Detail. - grafeas.v1.Version affected_version_start = 6; - - // The version number at the end of an interval in which this vulnerability - // exists. 
A vulnerability can affect a package between version numbers
-    // that are disjoint sets of intervals (example: [1.0.0-1.1.0],
-    // [2.4.6-2.4.8] and [4.5.6-4.6.8]) each of which will be represented in its
-    // own Detail. If a specific affected version is provided by a vulnerability
-    // database, affected_version_start and affected_version_end will be the
-    // same in that Detail.
-    grafeas.v1.Version affected_version_end = 7;
-
-    // The distro recommended [CPE URI](https://cpe.mitre.org/specification/)
-    // to update to that contains a fix for this vulnerability. It is possible
-    // for this to be different from the affected_cpe_uri.
-    string fixed_cpe_uri = 8;
-
-    // The distro recommended package to update to that contains a fix for this
-    // vulnerability. It is possible for this to be different from the
-    // affected_package.
-    string fixed_package = 9;
-
-    // The distro recommended version to update to that contains a
-    // fix for this vulnerability. Setting this to VersionKind.MAXIMUM means no
-    // such version is yet available.
-    grafeas.v1.Version fixed_version = 10;
-
-    // Whether this detail is obsolete. Occurrences are expected not to point to
-    // obsolete details.
-    bool is_obsolete = 11;
-
-    // The time this information was last changed at the source. This is an
-    // upstream timestamp from the underlying information source - e.g. Ubuntu
-    // security tracker.
-    google.protobuf.Timestamp source_update_time = 12;
-  }
-
-  // The full description of the CVSSv3 for this vulnerability.
-  CVSSv3 cvss_v3 = 4;
-
-  // Windows details get their own format because the information format and
-  // model don't match a normal detail. Specifically Windows updates are done as
-  // patches, thus Windows vulnerabilities really are a missing package, rather
-  // than a package being at an incorrect version.
-  repeated WindowsDetail windows_details = 5;
-
-  message WindowsDetail {
-    // Required. The [CPE URI](https://cpe.mitre.org/specification/) this
-    // vulnerability affects.
-    string cpe_uri = 1;
-
-    // Required. The name of this vulnerability.
-    string name = 2;
-
-    // The description of this vulnerability.
-    string description = 3;
-
-    // Required. The names of the KBs which have hotfixes to mitigate this
-    // vulnerability. Note that there may be multiple hotfixes (and thus
-    // multiple KBs) that mitigate a given vulnerability. Currently any listed
-    // KB's presence is considered a fix.
-    repeated KnowledgeBase fixing_kbs = 4;
-
-    message KnowledgeBase {
-      // The KB name (generally of the form KB[0-9]+ (e.g., KB123456)).
-      string name = 1;
-      // A link to the KB in the [Windows update catalog]
-      // (https://www.catalog.update.microsoft.com/).
-      string url = 2;
-    }
-  }
-
-  // The time this information was last changed at the source. This is an
-  // upstream timestamp from the underlying information source - e.g. Ubuntu
-  // security tracker.
-  google.protobuf.Timestamp source_update_time = 6;
-}
-
-// An occurrence of a security vulnerability on a resource.
-message VulnerabilityOccurrence {
-  // The type of package; whether native or non native (e.g., ruby gems, node.js
-  // packages, etc.).
-  string type = 1;
-
-  // Output only. The note provider assigned severity of this vulnerability.
-  Severity severity = 2;
-
-  // Output only. The CVSS score of this vulnerability. CVSS score is on a
-  // scale of 0 - 10 where 0 indicates low severity and 10 indicates high
-  // severity.
-  float cvss_score = 3;
-
-  // Required.
The set of affected locations and their fixes (if available) - // within the associated resource. - repeated PackageIssue package_issue = 4; - - // A detail for a distro and package this vulnerability occurrence was found - // in and its associated fix (if one is available). - message PackageIssue { - // Required. The [CPE URI](https://cpe.mitre.org/specification/) this - // vulnerability was found in. - string affected_cpe_uri = 1; - - // Required. The package this vulnerability was found in. - string affected_package = 2; - - // Required. The version of the package that is installed on the resource - // affected by this vulnerability. - grafeas.v1.Version affected_version = 3; - - // The [CPE URI](https://cpe.mitre.org/specification/) this vulnerability - // was fixed in. It is possible for this to be different from the - // affected_cpe_uri. - string fixed_cpe_uri = 4; - - // The package this vulnerability was fixed in. It is possible for this to - // be different from the affected_package. - string fixed_package = 5; - - // Required. The version of the package this vulnerability was fixed in. - // Setting this to VersionKind.MAXIMUM means no fix is yet available. - grafeas.v1.Version fixed_version = 6; - - // Output only. Whether a fix is available for this package. - bool fix_available = 7; - } - - // Output only. A one sentence description of this vulnerability. - string short_description = 5; - - // Output only. A detailed description of this vulnerability. - string long_description = 6; - - // Output only. URLs related to this vulnerability. - repeated grafeas.v1.RelatedUrl related_urls = 7; - - // The distro assigned severity for this vulnerability when it is available, - // otherwise this is the note provider assigned severity. - Severity effective_severity = 8; - - // Output only. Whether at least one of the affected packages has a fix - // available. - bool fix_available = 9; -} diff --git a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py deleted file mode 100644 index bf2abfb18d54..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py +++ /dev/null @@ -1,1212 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
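To make the schema above concrete, a short sketch of building the corresponding generated messages from Python (hypothetical, assuming the pre-removal package layout; the CPE and package values are invented):

    from grafeas.grafeas_v1.proto import vulnerability_pb2

    occ = vulnerability_pb2.VulnerabilityOccurrence(
        type="os",                        # package type
        severity=vulnerability_pb2.HIGH,  # module-level Severity enum alias
        cvss_score=7.5,
    )
    issue = occ.package_issue.add()       # repeated PackageIssue field
    issue.affected_cpe_uri = "cpe:/o:debian:debian_linux:9"  # invented CPE
    issue.affected_package = "openssl"
    issue.fix_available = True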
-# source: grafeas_v1/proto/vulnerability.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from grafeas.grafeas_v1.proto import common_pb2 as grafeas__v1_dot_proto_dot_common__pb2 -from grafeas.grafeas_v1.proto import cvss_pb2 as grafeas__v1_dot_proto_dot_cvss__pb2 -from grafeas.grafeas_v1.proto import ( - package_pb2 as grafeas__v1_dot_proto_dot_package__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas_v1/proto/vulnerability.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=_b( - "\n\rio.grafeas.v1P\001Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA" - ), - serialized_pb=_b( - '\n$grafeas_v1/proto/vulnerability.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1dgrafeas_v1/proto/common.proto\x1a\x1bgrafeas_v1/proto/cvss.proto\x1a\x1egrafeas_v1/proto/package.proto"\xfa\x06\n\x11VulnerabilityNote\x12\x12\n\ncvss_score\x18\x01 \x01(\x02\x12&\n\x08severity\x18\x02 \x01(\x0e\x32\x14.grafeas.v1.Severity\x12\x35\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32$.grafeas.v1.VulnerabilityNote.Detail\x12#\n\x07\x63vss_v3\x18\x04 \x01(\x0b\x32\x12.grafeas.v1.CVSSv3\x12\x44\n\x0fwindows_details\x18\x05 \x03(\x0b\x32+.grafeas.v1.VulnerabilityNote.WindowsDetail\x12\x36\n\x12source_update_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x8d\x03\n\x06\x44\x65tail\x12\x15\n\rseverity_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x14\n\x0cpackage_type\x18\x03 \x01(\t\x12\x18\n\x10\x61\x66\x66\x65\x63ted_cpe_uri\x18\x04 \x01(\t\x12\x18\n\x10\x61\x66\x66\x65\x63ted_package\x18\x05 \x01(\t\x12\x33\n\x16\x61\x66\x66\x65\x63ted_version_start\x18\x06 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x31\n\x14\x61\x66\x66\x65\x63ted_version_end\x18\x07 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x15\n\rfixed_cpe_uri\x18\x08 \x01(\t\x12\x15\n\rfixed_package\x18\t \x01(\t\x12*\n\rfixed_version\x18\n \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x13\n\x0bis_obsolete\x18\x0b \x01(\x08\x12\x36\n\x12source_update_time\x18\x0c \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\xbe\x01\n\rWindowsDetail\x12\x0f\n\x07\x63pe_uri\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\nfixing_kbs\x18\x04 \x03(\x0b\x32\x39.grafeas.v1.VulnerabilityNote.WindowsDetail.KnowledgeBase\x1a*\n\rKnowledgeBase\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t"\xbd\x04\n\x17VulnerabilityOccurrence\x12\x0c\n\x04type\x18\x01 \x01(\t\x12&\n\x08severity\x18\x02 \x01(\x0e\x32\x14.grafeas.v1.Severity\x12\x12\n\ncvss_score\x18\x03 \x01(\x02\x12G\n\rpackage_issue\x18\x04 \x03(\x0b\x32\x30.grafeas.v1.VulnerabilityOccurrence.PackageIssue\x12\x19\n\x11short_description\x18\x05 \x01(\t\x12\x18\n\x10long_description\x18\x06 \x01(\t\x12,\n\x0crelated_urls\x18\x07 \x03(\x0b\x32\x16.grafeas.v1.RelatedUrl\x12\x30\n\x12\x65\x66\x66\x65\x63tive_severity\x18\x08 \x01(\x0e\x32\x14.grafeas.v1.Severity\x12\x15\n\rfix_available\x18\t 
\x01(\x08\x1a\xe2\x01\n\x0cPackageIssue\x12\x18\n\x10\x61\x66\x66\x65\x63ted_cpe_uri\x18\x01 \x01(\t\x12\x18\n\x10\x61\x66\x66\x65\x63ted_package\x18\x02 \x01(\t\x12-\n\x10\x61\x66\x66\x65\x63ted_version\x18\x03 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x15\n\rfixed_cpe_uri\x18\x04 \x01(\t\x12\x15\n\rfixed_package\x18\x05 \x01(\t\x12*\n\rfixed_version\x18\x06 \x01(\x0b\x32\x13.grafeas.v1.Version\x12\x15\n\rfix_available\x18\x07 \x01(\x08*^\n\x08Severity\x12\x18\n\x14SEVERITY_UNSPECIFIED\x10\x00\x12\x0b\n\x07MINIMAL\x10\x01\x12\x07\n\x03LOW\x10\x02\x12\n\n\x06MEDIUM\x10\x03\x12\x08\n\x04HIGH\x10\x04\x12\x0c\n\x08\x43RITICAL\x10\x05\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_cvss__pb2.DESCRIPTOR, - grafeas__v1_dot_proto_dot_package__pb2.DESCRIPTOR, - ], -) - -_SEVERITY = _descriptor.EnumDescriptor( - name="Severity", - full_name="grafeas.v1.Severity", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="SEVERITY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MINIMAL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LOW", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="MEDIUM", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HIGH", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CRITICAL", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1646, - serialized_end=1740, -) -_sym_db.RegisterEnumDescriptor(_SEVERITY) - -Severity = enum_type_wrapper.EnumTypeWrapper(_SEVERITY) -SEVERITY_UNSPECIFIED = 0 -MINIMAL = 1 -LOW = 2 -MEDIUM = 3 -HIGH = 4 -CRITICAL = 5 - - -_VULNERABILITYNOTE_DETAIL = _descriptor.Descriptor( - name="Detail", - full_name="grafeas.v1.VulnerabilityNote.Detail", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="severity_name", - full_name="grafeas.v1.VulnerabilityNote.Detail.severity_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="grafeas.v1.VulnerabilityNote.Detail.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="package_type", - full_name="grafeas.v1.VulnerabilityNote.Detail.package_type", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="affected_cpe_uri", - full_name="grafeas.v1.VulnerabilityNote.Detail.affected_cpe_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_package", - full_name="grafeas.v1.VulnerabilityNote.Detail.affected_package", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_version_start", - full_name="grafeas.v1.VulnerabilityNote.Detail.affected_version_start", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_version_end", - full_name="grafeas.v1.VulnerabilityNote.Detail.affected_version_end", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_cpe_uri", - full_name="grafeas.v1.VulnerabilityNote.Detail.fixed_cpe_uri", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_package", - full_name="grafeas.v1.VulnerabilityNote.Detail.fixed_package", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_version", - full_name="grafeas.v1.VulnerabilityNote.Detail.fixed_version", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_obsolete", - full_name="grafeas.v1.VulnerabilityNote.Detail.is_obsolete", - index=10, - number=11, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_update_time", - full_name="grafeas.v1.VulnerabilityNote.Detail.source_update_time", - index=11, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=478, - serialized_end=875, -) - -_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE = _descriptor.Descriptor( - name="KnowledgeBase", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.KnowledgeBase", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.KnowledgeBase.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="url", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.KnowledgeBase.url", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1026, - serialized_end=1068, -) - -_VULNERABILITYNOTE_WINDOWSDETAIL = _descriptor.Descriptor( - name="WindowsDetail", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cpe_uri", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.cpe_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixing_kbs", - full_name="grafeas.v1.VulnerabilityNote.WindowsDetail.fixing_kbs", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=878, - serialized_end=1068, -) - -_VULNERABILITYNOTE = _descriptor.Descriptor( - name="VulnerabilityNote", - full_name="grafeas.v1.VulnerabilityNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="cvss_score", - full_name="grafeas.v1.VulnerabilityNote.cvss_score", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="grafeas.v1.VulnerabilityNote.severity", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="grafeas.v1.VulnerabilityNote.details", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cvss_v3", - full_name="grafeas.v1.VulnerabilityNote.cvss_v3", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="windows_details", - full_name="grafeas.v1.VulnerabilityNote.windows_details", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_update_time", - full_name="grafeas.v1.VulnerabilityNote.source_update_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_VULNERABILITYNOTE_DETAIL, _VULNERABILITYNOTE_WINDOWSDETAIL,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=178, - serialized_end=1068, -) - - -_VULNERABILITYOCCURRENCE_PACKAGEISSUE = _descriptor.Descriptor( - name="PackageIssue", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="affected_cpe_uri", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.affected_cpe_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_package", - 
full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.affected_package", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="affected_version", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.affected_version", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_cpe_uri", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.fixed_cpe_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_package", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.fixed_package", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_version", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.fixed_version", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fix_available", - full_name="grafeas.v1.VulnerabilityOccurrence.PackageIssue.fix_available", - index=6, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1418, - serialized_end=1644, -) - -_VULNERABILITYOCCURRENCE = _descriptor.Descriptor( - name="VulnerabilityOccurrence", - full_name="grafeas.v1.VulnerabilityOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="grafeas.v1.VulnerabilityOccurrence.type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="grafeas.v1.VulnerabilityOccurrence.severity", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cvss_score", - full_name="grafeas.v1.VulnerabilityOccurrence.cvss_score", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="package_issue", - full_name="grafeas.v1.VulnerabilityOccurrence.package_issue", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="short_description", - full_name="grafeas.v1.VulnerabilityOccurrence.short_description", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="long_description", - full_name="grafeas.v1.VulnerabilityOccurrence.long_description", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="related_urls", - full_name="grafeas.v1.VulnerabilityOccurrence.related_urls", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="effective_severity", - full_name="grafeas.v1.VulnerabilityOccurrence.effective_severity", - index=7, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fix_available", - full_name="grafeas.v1.VulnerabilityOccurrence.fix_available", - index=8, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_VULNERABILITYOCCURRENCE_PACKAGEISSUE,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1071, - serialized_end=1644, -) - -_VULNERABILITYNOTE_DETAIL.fields_by_name[ - "affected_version_start" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION -_VULNERABILITYNOTE_DETAIL.fields_by_name[ - "affected_version_end" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION -_VULNERABILITYNOTE_DETAIL.fields_by_name[ - "fixed_version" -].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION -_VULNERABILITYNOTE_DETAIL.fields_by_name[ - "source_update_time" -].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_VULNERABILITYNOTE_DETAIL.containing_type = _VULNERABILITYNOTE
-_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE.containing_type = (
-    _VULNERABILITYNOTE_WINDOWSDETAIL
-)
-_VULNERABILITYNOTE_WINDOWSDETAIL.fields_by_name[
-    "fixing_kbs"
-].message_type = _VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE
-_VULNERABILITYNOTE_WINDOWSDETAIL.containing_type = _VULNERABILITYNOTE
-_VULNERABILITYNOTE.fields_by_name["severity"].enum_type = _SEVERITY
-_VULNERABILITYNOTE.fields_by_name["details"].message_type = _VULNERABILITYNOTE_DETAIL
-_VULNERABILITYNOTE.fields_by_name[
-    "cvss_v3"
-].message_type = grafeas__v1_dot_proto_dot_cvss__pb2._CVSSV3
-_VULNERABILITYNOTE.fields_by_name[
-    "windows_details"
-].message_type = _VULNERABILITYNOTE_WINDOWSDETAIL
-_VULNERABILITYNOTE.fields_by_name[
-    "source_update_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_VULNERABILITYOCCURRENCE_PACKAGEISSUE.fields_by_name[
-    "affected_version"
-].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION
-_VULNERABILITYOCCURRENCE_PACKAGEISSUE.fields_by_name[
-    "fixed_version"
-].message_type = grafeas__v1_dot_proto_dot_package__pb2._VERSION
-_VULNERABILITYOCCURRENCE_PACKAGEISSUE.containing_type = _VULNERABILITYOCCURRENCE
-_VULNERABILITYOCCURRENCE.fields_by_name["severity"].enum_type = _SEVERITY
-_VULNERABILITYOCCURRENCE.fields_by_name[
-    "package_issue"
-].message_type = _VULNERABILITYOCCURRENCE_PACKAGEISSUE
-_VULNERABILITYOCCURRENCE.fields_by_name[
-    "related_urls"
-].message_type = grafeas__v1_dot_proto_dot_common__pb2._RELATEDURL
-_VULNERABILITYOCCURRENCE.fields_by_name["effective_severity"].enum_type = _SEVERITY
-DESCRIPTOR.message_types_by_name["VulnerabilityNote"] = _VULNERABILITYNOTE
-DESCRIPTOR.message_types_by_name["VulnerabilityOccurrence"] = _VULNERABILITYOCCURRENCE
-DESCRIPTOR.enum_types_by_name["Severity"] = _SEVERITY
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-VulnerabilityNote = _reflection.GeneratedProtocolMessageType(
-    "VulnerabilityNote",
-    (_message.Message,),
-    dict(
-        Detail=_reflection.GeneratedProtocolMessageType(
-            "Detail",
-            (_message.Message,),
-            dict(
-                DESCRIPTOR=_VULNERABILITYNOTE_DETAIL,
-                __module__="grafeas_v1.proto.vulnerability_pb2",
-                __doc__="""A detail for a distro and package affected by this
-  vulnerability and its associated fix (if one is available).
-
-
-  Attributes:
-      severity_name:
-          The distro assigned severity of this vulnerability.
-      description:
-          A vendor-specific description of this vulnerability.
-      package_type:
-          The type of package; whether native or non native (e.g., ruby
-          gems, node.js packages, etc.).
-      affected_cpe_uri:
-          Required. The `CPE URI
-          <https://cpe.mitre.org/specification/>`__ this vulnerability
-          affects.
-      affected_package:
-          Required. The package this vulnerability affects.
-      affected_version_start:
-          The version number at the start of an interval in which this
-          vulnerability exists. A vulnerability can affect a package
-          between version numbers that are disjoint sets of intervals
-          (example: [1.0.0-1.1.0], [2.4.6-2.4.8] and [4.5.6-4.6.8]) each
-          of which will be represented in its own Detail. If a specific
-          affected version is provided by a vulnerability database,
-          affected\_version\_start and affected\_version\_end will be
-          the same in that Detail.
-      affected_version_end:
-          The version number at the end of an interval in which this
-          vulnerability exists. A vulnerability can affect a package
-          between version numbers that are disjoint sets of intervals
-          (example: [1.0.0-1.1.0], [2.4.6-2.4.8] and [4.5.6-4.6.8]) each
-          of which will be represented in its own Detail. If a specific
-          affected version is provided by a vulnerability database,
-          affected\_version\_start and affected\_version\_end will be
-          the same in that Detail.
-      fixed_cpe_uri:
-          The distro recommended `CPE URI
-          <https://cpe.mitre.org/specification/>`__ to update to that
-          contains a fix for this vulnerability. It is possible for this
-          to be different from the affected\_cpe\_uri.
-      fixed_package:
-          The distro recommended package to update to that contains a
-          fix for this vulnerability. It is possible for this to be
-          different from the affected\_package.
-      fixed_version:
-          The distro recommended version to update to that contains a
-          fix for this vulnerability. Setting this to
-          VersionKind.MAXIMUM means no such version is yet available.
-      is_obsolete:
-          Whether this detail is obsolete. Occurrences are expected not
-          to point to obsolete details.
-      source_update_time:
-          The time this information was last changed at the source. This
-          is an upstream timestamp from the underlying information
-          source - e.g. Ubuntu security tracker.
-  """,
-                # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityNote.Detail)
-            ),
-        ),
-        WindowsDetail=_reflection.GeneratedProtocolMessageType(
-            "WindowsDetail",
-            (_message.Message,),
-            dict(
-                KnowledgeBase=_reflection.GeneratedProtocolMessageType(
-                    "KnowledgeBase",
-                    (_message.Message,),
-                    dict(
-                        DESCRIPTOR=_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE,
-                        __module__="grafeas_v1.proto.vulnerability_pb2",
-                        __doc__="""
-  Attributes:
-      name:
-          The KB name (generally of the form KB[0-9]+ (e.g., KB123456)).
-      url:
-          A link to the KB in the [Windows update catalog]
-          (https://www.catalog.update.microsoft.com/).
-  """,
-                        # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityNote.WindowsDetail.KnowledgeBase)
-                    ),
-                ),
-                DESCRIPTOR=_VULNERABILITYNOTE_WINDOWSDETAIL,
-                __module__="grafeas_v1.proto.vulnerability_pb2",
-                __doc__="""
-  Attributes:
-      cpe_uri:
-          Required. The `CPE URI
-          <https://cpe.mitre.org/specification/>`__ this vulnerability
-          affects.
-      name:
-          Required. The name of this vulnerability.
-      description:
-          The description of this vulnerability.
-      fixing_kbs:
-          Required. The names of the KBs which have hotfixes to mitigate
-          this vulnerability. Note that there may be multiple hotfixes
-          (and thus multiple KBs) that mitigate a given vulnerability.
-          Currently any listed KB's presence is considered a fix.
-  """,
-                # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityNote.WindowsDetail)
-            ),
-        ),
-        DESCRIPTOR=_VULNERABILITYNOTE,
-        __module__="grafeas_v1.proto.vulnerability_pb2",
-        __doc__="""A security vulnerability that can be found in resources.
-
-
-  Attributes:
-      cvss_score:
-          The CVSS score of this vulnerability. CVSS score is on a scale
-          of 0 - 10 where 0 indicates low severity and 10 indicates high
-          severity.
-      severity:
-          The note provider assigned severity of this vulnerability.
-      details:
-          Details of all known distros and packages affected by this
-          vulnerability.
-      cvss_v3:
-          The full description of the CVSSv3 for this vulnerability.
-      windows_details:
-          Windows details get their own format because the information
-          format and model don't match a normal detail. Specifically
-          Windows updates are done as patches, thus Windows
-          vulnerabilities really are a missing package, rather than a
-          package being at an incorrect version.
-      source_update_time:
-          The time this information was last changed at the source. This
-          is an upstream timestamp from the underlying information
-          source - e.g. Ubuntu security tracker.
-  """,
-        # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityNote)
-    ),
-)
-_sym_db.RegisterMessage(VulnerabilityNote)
-_sym_db.RegisterMessage(VulnerabilityNote.Detail)
-_sym_db.RegisterMessage(VulnerabilityNote.WindowsDetail)
-_sym_db.RegisterMessage(VulnerabilityNote.WindowsDetail.KnowledgeBase)
-
-VulnerabilityOccurrence = _reflection.GeneratedProtocolMessageType(
-    "VulnerabilityOccurrence",
-    (_message.Message,),
-    dict(
-        PackageIssue=_reflection.GeneratedProtocolMessageType(
-            "PackageIssue",
-            (_message.Message,),
-            dict(
-                DESCRIPTOR=_VULNERABILITYOCCURRENCE_PACKAGEISSUE,
-                __module__="grafeas_v1.proto.vulnerability_pb2",
-                __doc__="""A detail for a distro and package this vulnerability
-  occurrence was found in and its associated fix (if one is available).
-
-
-  Attributes:
-      affected_cpe_uri:
-          Required. The `CPE URI
-          <https://cpe.mitre.org/specification/>`__ this vulnerability
-          was found in.
-      affected_package:
-          Required. The package this vulnerability was found in.
-      affected_version:
-          Required. The version of the package that is installed on the
-          resource affected by this vulnerability.
-      fixed_cpe_uri:
-          The `CPE URI <https://cpe.mitre.org/specification/>`__ this
-          vulnerability was fixed in. It is possible for this to be
-          different from the affected\_cpe\_uri.
-      fixed_package:
-          The package this vulnerability was fixed in. It is possible
-          for this to be different from the affected\_package.
-      fixed_version:
-          Required. The version of the package this vulnerability was
-          fixed in. Setting this to VersionKind.MAXIMUM means no fix is
-          yet available.
-      fix_available:
-          Output only. Whether a fix is available for this package.
-  """,
-                # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityOccurrence.PackageIssue)
-            ),
-        ),
-        DESCRIPTOR=_VULNERABILITYOCCURRENCE,
-        __module__="grafeas_v1.proto.vulnerability_pb2",
-        __doc__="""An occurrence of a security vulnerability on a resource.
-
-
-  Attributes:
-      type:
-          The type of package; whether native or non native (e.g., ruby
-          gems, node.js packages, etc.).
-      severity:
-          Output only. The note provider assigned severity of this
-          vulnerability.
-      cvss_score:
-          Output only. The CVSS score of this vulnerability. CVSS score
-          is on a scale of 0 - 10 where 0 indicates low severity and 10
-          indicates high severity.
-      package_issue:
-          Required. The set of affected locations and their fixes (if
-          available) within the associated resource.
-      short_description:
-          Output only. A one sentence description of this vulnerability.
-      long_description:
-          Output only. A detailed description of this vulnerability.
-      related_urls:
-          Output only. URLs related to this vulnerability.
-      effective_severity:
-          The distro assigned severity for this vulnerability when it is
-          available, otherwise this is the note provider assigned
-          severity.
-      fix_available:
-          Output only. Whether at least one of the affected packages has
-          a fix available.
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.VulnerabilityOccurrence) - ), -) -_sym_db.RegisterMessage(VulnerabilityOccurrence) -_sym_db.RegisterMessage(VulnerabilityOccurrence.PackageIssue) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/grafeas/grafeas/grafeas_v1/types.py b/grafeas/grafeas/grafeas_v1/types.py deleted file mode 100644 index 9d65a986fea5..000000000000 --- a/grafeas/grafeas/grafeas_v1/types.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.protobuf import any_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 -from grafeas.grafeas_v1.proto import attestation_pb2 -from grafeas.grafeas_v1.proto import build_pb2 -from grafeas.grafeas_v1.proto import common_pb2 -from grafeas.grafeas_v1.proto import cvss_pb2 -from grafeas.grafeas_v1.proto import deployment_pb2 -from grafeas.grafeas_v1.proto import discovery_pb2 -from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.proto import image_pb2 -from grafeas.grafeas_v1.proto import package_pb2 -from grafeas.grafeas_v1.proto import provenance_pb2 -from grafeas.grafeas_v1.proto import upgrade_pb2 -from grafeas.grafeas_v1.proto import vulnerability_pb2 - - -_shared_modules = [ - any_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [ - attestation_pb2, - build_pb2, - common_pb2, - cvss_pb2, - deployment_pb2, - discovery_pb2, - grafeas_pb2, - image_pb2, - package_pb2, - provenance_pb2, - upgrade_pb2, - vulnerability_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "grafeas.grafeas_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/grafeas/noxfile.py b/grafeas/noxfile.py deleted file mode 100644 index cc5bea5ec83a..000000000000 --- a/grafeas/noxfile.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import os -import shutil - -import nox - - -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) - -@nox.session(python="3.7") -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", "black", *LOCAL_DEPS) - session.run( - "black", - "--check", - "grafeas", - "tests", - "docs", - ) - session.run("flake8", "grafeas", "tests") - - -@nox.session(python="3.6") -def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ - session.install("black") - session.run( - "black", - "grafeas", - "tests", - "docs", - ) - - -@nox.session(python="3.7") -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def default(session): - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", ".") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - "--cov=grafeas", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=78", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) -def unit(session): - """Run the unit test suite.""" - default(session) - - -@nox.session(python=["2.7", "3.7"]) -def system(session): - """Run the system test suite.""" - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") - session.install("-e", ".") - - # Run py.test against the system tests. 
- if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) - - -@nox.session(python="3.7") -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=78") - - session.run("coverage", "erase") - -@nox.session(python="3.7") -def docs(session): - """Build the docs for this library.""" - - session.install('-e', '.') - session.install('sphinx', 'alabaster', 'recommonmark') - - shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) - session.run( - 'sphinx-build', - '-W', # warnings as errors - '-T', # show full traceback on exception - '-N', # no colors - '-b', 'html', - '-d', os.path.join('docs', '_build', 'doctrees', ''), - os.path.join('docs', ''), - os.path.join('docs', '_build', 'html', ''), - ) diff --git a/grafeas/setup.cfg b/grafeas/setup.cfg deleted file mode 100644 index 3bd555500e37..000000000000 --- a/grafeas/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/grafeas/setup.py b/grafeas/setup.py deleted file mode 100644 index 4c351d7a7101..000000000000 --- a/grafeas/setup.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
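The noxfile removed above drives these checks through the nox CLI; sessions parameterized over several interpreters get a -<version> suffix. Illustrative invocations (run from the grafeas/ directory):

    # nox -s lint          -> black --check plus flake8
    # nox -s blacken       -> reformat in place with black
    # nox -s unit-3.7      -> pytest with coverage, failing under 78%
    # nox -s system-3.7    -> skipped unless GOOGLE_APPLICATION_CREDENTIALS is set
    # nox -s cover         -> aggregated coverage report, then erase
    # nox -s docs          -> sphinx-build with warnings treated as errors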
- -import io -import os - -import setuptools - -name = "grafeas" -description = "Grafeas API client library" -version = "0.3.0" -release_status = "Development Status :: 3 - Alpha" -dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', -] - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package for package in setuptools.find_packages() if package.startswith("grafeas") -] - -namespaces = ["grafeas"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - include_package_data=True, - zip_safe=False, -) diff --git a/grafeas/synth.metadata b/grafeas/synth.metadata deleted file mode 100644 index 10f2cb3c38d3..000000000000 --- a/grafeas/synth.metadata +++ /dev/null @@ -1,249 +0,0 @@ -{ - "updateTime": "2020-01-30T13:26:03.331590Z", - "sources": [ - { - "generator": { - "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c1246a29e22b0f98e800a536b5b0da2d933a55f2", - "internalRef": "292310790", - "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n" - } - }, - { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2019.10.17" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "grafeas", - "apiVersion": "v1", - "language": "python", - "generator": "gapic", - "config": "grafeas/artman_grafeas_v1.yaml" - } - } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/gapic/v1/api.rst" - }, - { - "path": "docs/gapic/v1/types.rst" - }, - { - "path": "docs/index.rst" - }, - { - "path": "grafeas.py" - }, - { - "path": "grafeas/__init__.py" - }, - { - "path": "grafeas/grafeas.py" - }, - { - "path": "grafeas/grafeas_v1/__init__.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/__init__.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/enums.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/grafeas_client.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/grafeas_client_config.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/transports/__init__.py" - }, - { - "path": "grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py" - }, - { - "path": "grafeas/grafeas_v1/proto/__init__.py" - }, - { - "path": "grafeas/grafeas_v1/proto/attestation.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/attestation_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/attestation_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/build.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/build_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/build_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/common.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/common_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/common_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/cvss.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/cvss_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/cvss_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/deployment.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/deployment_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/deployment_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/discovery.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/discovery_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/discovery_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/grafeas.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/grafeas_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/image.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/image_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/image_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/package.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/package_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/package_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/provenance.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/provenance_pb2.py" - }, - { - "path": 
"grafeas/grafeas_v1/proto/provenance_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/upgrade.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/upgrade_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/proto/vulnerability.proto" - }, - { - "path": "grafeas/grafeas_v1/proto/vulnerability_pb2.py" - }, - { - "path": "grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py" - }, - { - "path": "grafeas/grafeas_v1/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/unit/gapic/v1/test_grafeas_client_v1.py" - } - ] -} \ No newline at end of file diff --git a/grafeas/synth.py b/grafeas/synth.py deleted file mode 100644 index 74bfcd9d64d1..000000000000 --- a/grafeas/synth.py +++ /dev/null @@ -1,358 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -import synthtool.gcp as gcp -import logging - -logging.basicConfig(level=logging.DEBUG) - -gapic = gcp.GAPICGenerator() -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Generate Grafeas GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - "grafeas", "v1", config_path="/grafeas/artman_grafeas_v1.yaml", include_protos=True -) - -excludes = ["README.rst", "nox.py", "setup.py", "docs/index.rst"] - -# Make 'grafeas' a namespace -s.move(library / "grafeas", excludes=["__init__.py"]) -s.move(library / "docs", excludes=["conf.py", "index.rst"]) -s.move( - library / "google/cloud/grafeas_v1/proto", - "grafeas/grafeas_v1/proto", - excludes=excludes, -) -s.move(library / "tests") - - -# Fix proto imports -s.replace( - ["grafeas/**/*.py", "tests/**/*.py"], - "from grafeas\.v1( import \w*_pb2)", - "from grafeas.grafeas_v1.proto\g<1>", -) -s.replace( - "grafeas/**/*_pb2.py", - "from grafeas_v1\.proto( import \w*_pb2)", - "from grafeas.grafeas_v1.proto\g<1>", -) -s.replace( - "grafeas/**/grafeas_pb2_grpc.py", - "from grafeas_v1\.proto", - "from grafeas.grafeas_v1.proto", -) - -# Make package name 'grafeas' -s.replace( - "grafeas/grafeas_v1/gapic/grafeas_client.py", "google-cloud-grafeas", "grafeas" -) - -# Fix docstrings with no summary lines -s.replace( - "grafeas/grafeas_v1/proto/vulnerability_pb2.py", - r"""(\s+)__doc__ = \"\"\"Attributes:""", - """\g<1>__doc__=\"\"\" - Attributes:""", -) - -# Replace mentions of 'Container Analysis' with 'Grafeas' in the docs -s.replace("docs/**/v*/*.rst", "Container Analysis", "Grafeas") - - -# ---------------------------------------------------------------------------- -# Remove google-specific portions of library -# ---------------------------------------------------------------------------- - -# Please see this PR 
https://github.com/googleapis/google-cloud-python/pull/8186/ - -# Remove default service address, default scopes, default credentials -# Update tests and code in docstrings showing client instantiation. - - -s.replace( - "grafeas/**/grafeas_client.py", - r""" SERVICE_ADDRESS = 'containeranalysis\.googleapis\.com:443' - \"\"\"The default address of the service\.\"\"\"""", - "", -) - -s.replace( - "grafeas/**/grafeas_client.py", - r""" def __init__\(self, transport=None, channel=None, credentials=None, - client_config=None, client_info=None, client_options=None\):""", - " def __init__(self, transport, client_config=None, client_info=None):", -) - -s.replace( - "grafeas/**/grafeas_client.py", - r"""Union\[~\.GrafeasGrpcTransport, - Callable\[\[~\.Credentials, type], ~\.GrafeasGrpcTransport\]""", - """~.GrafeasGrpcTransport""", -) - -s.replace( - "grafeas/**/grafeas_client.py", - r""" channel \(grpc\.Channel\): DEPRECATED\. A ``Channel`` instance - through which to make calls\. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception\. - credentials \(google\.auth\.credentials\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If none - are specified, the client will attempt to ascertain the - credentials from the environment\. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception\.""", - "", -) - -# Remove client_options -# api_endpoint is currently the only option and doesn't make sense for Grafeas. -s.replace("grafeas/**/grafeas_client.py", "import google.api_core.client_options\n", "") -s.replace( - "grafeas/**/grafeas_client.py", - r""" client_options \(Union\[dict, google\.api_core\.client_options\.ClientOptions\]\): - Client options used to set user options on the client\. API Endpoint - should be set through client_options\. - \"\"\"""", - " \"\"\"" -) - -s.replace( - "grafeas/**/grafeas_client.py", - r"""if channel: - warnings\.warn\('The `channel` argument is deprecated; use ' - '`transport` instead\.', - PendingDeprecationWarning, stacklevel=2\) - - api_endpoint = self\.SERVICE_ADDRESS - if client_options: - if type\(client_options\) == dict: - client_options = google\.api_core\.client_options\.from_dict\(client_options\) - if client_options\.api_endpoint: - api_endpoint = client_options\.api_endpoint - - \# Instantiate the transport\. - \# The transport is responsible for handling serialization and - \# deserialization and actually sending data to the service\. - if transport: - if callable\(transport\): - self\.transport = transport\( - credentials=credentials, - default_class=grafeas_grpc_transport\.GrafeasGrpcTransport, - address=api_endpoint, - \) - else: - if credentials: - raise ValueError\( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive\.' - \) - self\.transport = transport - else: - self\.transport = grafeas_grpc_transport\.GrafeasGrpcTransport\( - address=api_endpoint, - channel=channel, - credentials=credentials, - \)""", - """# Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- self.transport = transport""", -) - -s.replace( - "grafeas/**/grafeas_client.py", - r""" Example: - >>> from grafeas import grafeas_v1 - >>> - >>> client = grafeas_v1\.GrafeasClient\(\)""", - """ Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport)""", -) - -s.replace( - "grafeas/**/grafeas_client.py", - r''' @classmethod - def from_service_account_file\(cls, filename, \*args, \*\*kwargs\): - """Creates an instance of this client using the provided credentials - file\. - - Args: - filename \(str\): The path to the service account private key json - file\. - args: Additional arguments to pass to the constructor\. - kwargs: Additional arguments to pass to the constructor\. - - Returns: - GrafeasClient: The constructed client\. - """ - credentials = service_account\.Credentials\.from_service_account_file\( - filename\) - kwargs\['credentials'\] = credentials - return cls\(\*args, \*\*kwargs\) - - from_service_account_json = from_service_account_file''', - "") - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \# The scopes needed to make gRPC calls to all of the methods defined - \# in this service\. - _OAUTH_SCOPES = \( - 'https://www\.googleapis\.com/auth/cloud-platform', - \)""", - "", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" def __init__\(self, channel=None, credentials=None, - address='containeranalysis\.googleapis\.com:443'\):""", - """ def __init__(self, address, scopes, channel=None, credentials=None):""", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \# Create the channel\. - if channel is None: - channel = self\.create_channel\( - address=address, - credentials=credentials, -""", - """ # Create the channel. - if channel is None: - channel = self.create_channel( - address, - scopes, - credentials=credentials, -""", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" def create_channel\( - cls, - address='containeranalysis\.googleapis\.com:443', - credentials=None, - \*\*kwargs\):""", - """ def create_channel( - cls, - address, - scopes, - credentials=None, - **kwargs):""", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" Args: - address \(str\): The host for the channel to use\. - credentials \(~\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If - none are specified, the client will attempt to ascertain - the credentials from the environment\.""", - """ Args: - address (str): The host for the channel to use. - scopes (Sequence[str]): The scopes needed to make gRPC calls. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment.""", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" return google\.api_core\.grpc_helpers\.create_channel\( - address, - credentials=credentials, - scopes=cls\._OAUTH_SCOPES, - \*\*kwargs - \)""", - """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=scopes, - **kwargs - )""", -) - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \"\"\"Instantiate the transport class\. - - Args: - channel \(grpc\.Channel\): A ``Channel`` instance through - which to make calls\. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception\. - credentials \(google\.auth\.credentials\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If none - are specified, the client will attempt to ascertain the - credentials from the environment\. - address \(str\): The address where the service is hosted\.""", - ''' """Instantiate the transport class. - - Args: - address (str): The address where the service is hosted. - scopes (Sequence[str]): The scopes needed to make gRPC calls. - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - ''', -) - -s.replace( - "tests/**/test_grafeas_client_v1.py", - r"""from grafeas\.grafeas_v1\.proto import grafeas_pb2""", - r"""from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport""", -) - -s.replace( - "tests/**/test_grafeas_client_v1.py", - r"(\s+)client = grafeas_v1\.GrafeasClient\(\)", - r"""\g<1>address = "[SERVICE_ADDRESS]" -\g<1>scopes = ("SCOPE") -\g<1>transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) -\g<1>client=grafeas_v1.GrafeasClient(transport)""", -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=78, cov_level=78) -s.move(templated_files, excludes=["noxfile.py"]) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py b/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py deleted file mode 100644 index 5ac0efb61c95..000000000000 --- a/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py +++ /dev/null @@ -1,842 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
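(After the synth rewrites above, a Grafeas client can no longer be built with a zero-argument constructor: an explicit transport carrying the address and scopes is required. A minimal sketch of the resulting instantiation pattern; the endpoint and scope values reuse, purely for illustration, the defaults that the rewrites strip out:

from grafeas import grafeas_v1
from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport

# Grafeas itself ships no default endpoint or OAuth scopes after the
# rewrite, so both must be supplied by the caller; these values are
# placeholders taken from the removed defaults.
address = "containeranalysis.googleapis.com:443"
scopes = ("https://www.googleapis.com/auth/cloud-platform",)

transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes)
client = grafeas_v1.GrafeasClient(transport)

This mirrors the docstring example that synth.py splices into grafeas_client.py above.)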
- -"""Unit tests.""" - -import mock -import pytest - -from google.protobuf import empty_pb2 -from grafeas import grafeas_v1 -from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestGrafeasClient(object): - def test_get_occurrence(self): - # Setup Expected Response - name_2 = "name2-1052831874" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name_2, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - response = client.get_occurrence(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetOccurrenceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.get_occurrence(name) - - def test_list_occurrences(self): - # Setup Expected Response - next_page_token = "" - occurrences_element = {} - occurrences = [occurrences_element] - expected_response = { - "next_page_token": next_page_token, - "occurrences": occurrences, - } - expected_response = grafeas_pb2.ListOccurrencesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = 
grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_occurrences(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.occurrences[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListOccurrencesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_occurrences_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_occurrences(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_occurrence(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - client.delete_occurrence(name) - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.DeleteOccurrenceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.delete_occurrence(name) - - def test_create_occurrence(self): - # Setup Expected Response - name = "name3373707" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - occurrence = {} - - response = client.create_occurrence(parent, occurrence) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.CreateOccurrenceRequest( - 
parent=parent, occurrence=occurrence - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - occurrence = {} - - with pytest.raises(CustomException): - client.create_occurrence(parent, occurrence) - - def test_batch_create_occurrences(self): - # Setup Expected Response - expected_response = {} - expected_response = grafeas_pb2.BatchCreateOccurrencesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - occurrences = [] - - response = client.batch_create_occurrences(parent, occurrences) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.BatchCreateOccurrencesRequest( - parent=parent, occurrences=occurrences - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_batch_create_occurrences_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - occurrences = [] - - with pytest.raises(CustomException): - client.batch_create_occurrences(parent, occurrences) - - def test_update_occurrence(self): - # Setup Expected Response - name_2 = "name2-1052831874" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name_2, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - occurrence = {} - - response = client.update_occurrence(name, occurrence) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.UpdateOccurrenceRequest( - name=name, occurrence=occurrence - ) - actual_request 
= channel.requests[0][1] - assert expected_request == actual_request - - def test_update_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - occurrence = {} - - with pytest.raises(CustomException): - client.update_occurrence(name, occurrence) - - def test_get_occurrence_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - response = client.get_occurrence_note(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetOccurrenceNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_occurrence_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.get_occurrence_note(name) - - def test_get_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - response = client.get_note(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == 
actual_request - - def test_get_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - with pytest.raises(CustomException): - client.get_note(name) - - def test_list_notes(self): - # Setup Expected Response - next_page_token = "" - notes_element = {} - notes = [notes_element] - expected_response = {"next_page_token": next_page_token, "notes": notes} - expected_response = grafeas_pb2.ListNotesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_notes(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.notes[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListNotesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_notes_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_notes(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_note(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - client.delete_note(name) - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.DeleteNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - with pytest.raises(CustomException): - client.delete_note(name) - - def 
test_create_note(self): - # Setup Expected Response - name = "name3373707" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - note_id = "noteId2129224840" - note = {} - - response = client.create_note(parent, note_id, note) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.CreateNoteRequest( - parent=parent, note_id=note_id, note=note - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - note_id = "noteId2129224840" - note = {} - - with pytest.raises(CustomException): - client.create_note(parent, note_id, note) - - def test_batch_create_notes(self): - # Setup Expected Response - expected_response = {} - expected_response = grafeas_pb2.BatchCreateNotesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - notes = {} - - response = client.batch_create_notes(parent, notes) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.BatchCreateNotesRequest( - parent=parent, notes=notes - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_batch_create_notes_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - notes = {} - - with pytest.raises(CustomException): - client.batch_create_notes(parent, notes) - - def test_update_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - 
long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - note = {} - - response = client.update_note(name, note) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.UpdateNoteRequest(name=name, note=note) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - note = {} - - with pytest.raises(CustomException): - client.update_note(name, note) - - def test_list_note_occurrences(self): - # Setup Expected Response - next_page_token = "" - occurrences_element = {} - occurrences = [occurrences_element] - expected_response = { - "next_page_token": next_page_token, - "occurrences": occurrences, - } - expected_response = grafeas_pb2.ListNoteOccurrencesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - paged_list_response = client.list_note_occurrences(name) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.occurrences[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListNoteOccurrencesRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_note_occurrences_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - paged_list_response = client.list_note_occurrences(name) - with pytest.raises(CustomException): - list(paged_list_response)
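(Every test in the deleted module follows the same mocking recipe: patch `google.api_core.grpc_helpers.create_channel` to return a `ChannelStub`, which records each (method, request) pair and replays canned responses, then assert on `channel.requests`. A condensed sketch of that flow, reusing the `ChannelStub` class defined at the top of the test module; the resource names are placeholders:

import mock

from grafeas import grafeas_v1
from grafeas.grafeas_v1.proto import grafeas_pb2
from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport

# Canned response the stub channel will replay.
expected = grafeas_pb2.Note(name="projects/[PROJECT]/notes/[NOTE]")
channel = ChannelStub(responses=[expected])

patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
    create_channel.return_value = channel
    # The transport receives the stubbed channel, so no network is touched.
    address = "[SERVICE_ADDRESS]"
    scopes = "SCOPE"
    transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes)
    client = grafeas_v1.GrafeasClient(transport)

    response = client.get_note(client.note_path("[PROJECT]", "[NOTE]"))
    assert response == expected
    # The stub recorded exactly one (method, request) pair.
    assert len(channel.requests) == 1

Exception cases work the same way: seeding `ChannelStub(responses=[CustomException()])` makes the stub raise instead of returning, which the tests assert with `pytest.raises`.)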