File tree Expand file tree Collapse file tree 11 files changed +24
-32
lines changed
appengine/standard_python3/bigquery Expand file tree Collapse file tree 11 files changed +24
-32
lines changed Original file line number Diff line number Diff line change 1- google-cloud-bigquery == 1.28.0
1+ google-cloud-bigquery == 2.0.0
22Flask == 1.1.2
Original file line number Diff line number Diff line change 2626# other invocations of this tutorial. In practice, you could leverage
2727# a persistent dataset and not create/destroy it with each invocation.
2828dataset_id = "bqml_tutorial_{}" .format (str (uuid .uuid4 ().hex ))
29+ full_dataset_id = "{}.{}" .format (client .project , dataset_id )
2930# [END bqml_data_scientist_tutorial_import_and_client]
3031
3132
3233@pytest .fixture
3334def delete_dataset ():
3435 yield
35- client .delete_dataset (
36- client .dataset (dataset_id ), delete_contents = True )
36+ client .delete_dataset (full_dataset_id , delete_contents = True )
3737
3838
3939def test_data_scientist_tutorial (delete_dataset ):
4040 # [START bqml_data_scientist_tutorial_create_dataset]
41- dataset = bigquery .Dataset (client . dataset ( dataset_id ) )
41+ dataset = bigquery .Dataset (full_dataset_id )
4242 dataset .location = 'US'
4343 client .create_dataset (dataset )
4444 # [END bqml_data_scientist_tutorial_create_dataset]
Original file line number Diff line number Diff line change 2727# other invocations of this tutorial. In practice, you could leverage
2828# a persistent dataset and not create/destroy it with each invocation.
2929dataset_id = "bqml_tutorial_{}" .format (str (uuid .uuid4 ().hex ))
30+ full_dataset_id = "{}.{}" .format (client .project , dataset_id )
3031# [END bqml_ncaa_tutorial_import_and_client]
3132
3233
3334@pytest .fixture
3435def delete_dataset ():
36+
3537 yield
36- client .delete_dataset (
37- client .dataset (dataset_id ), delete_contents = True )
38+ client .delete_dataset (full_dataset_id , delete_contents = True )
3839
3940
4041def test_ncaa_tutorial (delete_dataset ):
4142 # [START bqml_ncaa_tutorial_create_dataset]
42- dataset = bigquery .Dataset (client . dataset ( dataset_id ) )
43+ dataset = bigquery .Dataset (full_dataset_id )
4344 dataset .location = 'US'
4445 client .create_dataset (dataset )
4546 # [END bqml_ncaa_tutorial_create_dataset]
Original file line number Diff line number Diff line change 1+ flaky==3.7.0
2+ mock==4.0.2
13pytest==6.0.1
Original file line number Diff line number Diff line change 1+ google-cloud-bigquery [pandas,bqstorage ]== 2.0.0
2+ google-cloud-bigquery-storage == 2.0.0
13pandas == 1.1.3
2- google-cloud-bigquery == 1.28.0
4+ pyarrow == 1.0.1
35flaky == 3.7.0
46mock == 4.0.2
Original file line number Diff line number Diff line change 11grpcio == 1.32.0
2- google-cloud-bigquery [pandas,pyarrow ]== 1.28 .0
2- google-cloud-bigquery [pandas,pyarrow ]== 1.28.0
3- google-cloud-bigquery-storage == 1.1.0
2+ google-cloud-bigquery [pandas,pyarrow ]== 2.0.0
3+ google-cloud-bigquery-storage == 2.0.0
5- ipython == 7.13.0 ; python_version > "2.7"
6- ipython <= 5.5 ; python_version == "2.7"
7- google-cloud-monitoring == 1.1.0
5+ ipython == 7.13.0
86pyarrow == 1.0.1
Original file line number Diff line number Diff line change 1- google-cloud-bigquery == 1.28.0
2- google-cloud-bigquery-storage == 1.1.0
3- pandas == 0.25.3 ; python_version > '3.0'
4- pandas == 0.24.2 ; python_version < '3.0'
1+ google-cloud-bigquery == 2.0.0
2+ google-cloud-bigquery-storage == 2.0.0
3+ pandas == 1.1.3
54pandas-gbq == 0.14.0
65pyarrow == 1.0.1
76grpcio == 1.32.0
Original file line number Diff line number Diff line change @@ -83,23 +83,13 @@ def test_pandas_gbq_query():
8383
8484def test_client_library_query_bqstorage ():
8585 # [START bigquery_migration_client_library_query_bqstorage]
86- import google .auth
8786 from google .cloud import bigquery
88- from google .cloud import bigquery_storage_v1beta1
8987
90- # Create a BigQuery client and a BigQuery Storage API client with the same
91- # credentials to avoid authenticating twice.
92- credentials , project_id = google .auth .default (
93- scopes = ["https://www.googleapis.com/auth/cloud-platform" ]
94- )
95- client = bigquery .Client (credentials = credentials , project = project_id )
96- bqstorage_client = bigquery_storage_v1beta1 .BigQueryStorageClient (
97- credentials = credentials
98- )
88+ client = bigquery .Client ()
9989 sql = "SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012`"
10090
10191 # Use a BigQuery Storage API client to download results more quickly.
102- df = client .query (sql ).to_dataframe (bqstorage_client = bqstorage_client )
92+ df = client .query (sql ).to_dataframe (create_bqstorage_client = True )
10393 # [END bigquery_migration_client_library_query_bqstorage]
10494 assert len (df ) > 0
10595
Original file line number Diff line number Diff line change 11pytest==6.0.1
22pandas==1.1.3
3- google-cloud-bigquery==1.28 .0
3+ google-cloud-bigquery==2.0 .0
44pyarrow==1.0.1
Original file line number Diff line number Diff line change 33#google-auth-httplib2==0.0.3
44google-cloud-storage == 1.31.2
55google-cloud-dataproc == 2.0.2
6- google-cloud-bigquery == 1.28 .0
6- google-cloud-bigquery == 1.28.0
6+ google-cloud-bigquery == 2.0.0 You can’t perform that action at this time.
0 commit comments