11# -*- coding: utf-8 -*-
22
33import sys
4- import uuid
54from datetime import datetime
65
7- import google .oauth2 .service_account
86import numpy as np
97import pandas
108import pandas .api .types
@@ -28,76 +26,6 @@ def test_imports():
2826 gbq ._test_google_api_imports ()
2927
3028
@pytest.fixture(params=["env"])
def project(request, project_id):
    """Return the BigQuery project to test against.

    "env" resolves to the environment-provided ``project_id``; "none"
    (and any other param) resolves to ``None``.
    """
    return {"env": project_id, "none": None}.get(request.param)
37-
38-
@pytest.fixture()
def credentials(private_key_path):
    """Service-account credentials loaded from the JSON key file on disk."""
    load = google.oauth2.service_account.Credentials.from_service_account_file
    return load(private_key_path)
44-
45-
@pytest.fixture()
def gbq_connector(project, credentials):
    """A ``GbqConnector`` wired to the test project and credentials."""
    connector = gbq.GbqConnector(project, credentials=credentials)
    return connector
49-
50-
@pytest.fixture()
def random_dataset(bigquery_client, random_dataset_id):
    """Create a throwaway BigQuery dataset and return the Dataset object."""
    # Imported lazily so merely collecting tests doesn't require the client lib.
    from google.cloud import bigquery

    dataset = bigquery.Dataset(bigquery_client.dataset(random_dataset_id))
    bigquery_client.create_dataset(dataset)
    return dataset
59-
60-
@pytest.fixture()
def tokyo_dataset(bigquery_client, random_dataset_id):
    """Create a dataset located in asia-northeast1 and return its dataset ID."""
    # Imported lazily so merely collecting tests doesn't require the client lib.
    from google.cloud import bigquery

    dataset = bigquery.Dataset(bigquery_client.dataset(random_dataset_id))
    # Pin the dataset to the Tokyo region to exercise non-US location handling.
    dataset.location = "asia-northeast1"
    bigquery_client.create_dataset(dataset)
    return random_dataset_id
70-
71-
@pytest.fixture()
def tokyo_table(bigquery_client, tokyo_dataset):
    """Create a small random table in the Tokyo dataset via DDL; return its ID."""
    table_id = "tokyo_table"
    # Create a random table using DDL.
    # https://github.com/GoogleCloudPlatform/golang-samples/blob/2ab2c6b79a1ea3d71d8f91609b57a8fbde07ae5d/bigquery/snippets/snippet.go#L739
    ddl = """CREATE TABLE {}.{}
    AS SELECT
      2000 + CAST(18 * RAND() as INT64) as year,
      IF(RAND() > 0.5,"foo","bar") as token
    FROM UNNEST(GENERATE_ARRAY(0,5,1)) as r
    """.format(tokyo_dataset, table_id)
    # Run the job in the dataset's region and block until the table exists.
    job = bigquery_client.query(ddl, location="asia-northeast1")
    job.result()
    return table_id
89-
90-
@pytest.fixture()
def gbq_dataset(project, credentials):
    """A pandas-gbq ``_Dataset`` helper bound to the test project."""
    dataset = gbq._Dataset(project, credentials=credentials)
    return dataset
94-
95-
@pytest.fixture()
def gbq_table(project, credentials, random_dataset_id):
    """A pandas-gbq ``_Table`` helper pointed at the random test dataset."""
    table = gbq._Table(project, random_dataset_id, credentials=credentials)
    return table
99-
100-
10129def make_mixed_dataframe_v2 (test_size ):
10230 # create df to test for all BQ datatypes except RECORD
10331 bools = np .random .randint (2 , size = (1 , test_size )).astype (bool )
@@ -600,9 +528,6 @@ def test_zero_rows(self, project_id):
600528 empty_columns ,
601529 columns = ["name" , "number" , "is_hurricane" , "iso_time" ],
602530 )
603- expected_result ["iso_time" ] = expected_result [
604- "iso_time"
605- ].dt .tz_localize ("UTC" )
606531 tm .assert_frame_equal (df , expected_result , check_index_type = False )
607532
608533 def test_one_row_one_column (self , project_id ):
@@ -917,43 +842,6 @@ def test_tokyo(self, tokyo_dataset, tokyo_table, project_id):
917842 assert df ["max_year" ][0 ] >= 2000
918843
919844
@pytest.mark.slow(reason="Large query for BQ Storage API tests.")
def test_read_gbq_w_bqstorage_api(credentials, random_dataset):
    """read_gbq via the BigQuery Storage API should download all 10M rows."""
    pytest.importorskip("google.cloud.bigquery_storage")
    # Unique destination table so concurrent runs don't clobber each other.
    destination = {
        "projectId": random_dataset.project,
        "datasetId": random_dataset.dataset_id,
        "tableId": "test_read_gbq_w_bqstorage_api_"
        + str(uuid.uuid4()).replace("-", "_"),
    }
    query = """
    SELECT
        total_amount,
        passenger_count,
        trip_distance
    FROM `bigquery-public-data.new_york_taxi_trips.tlc_green_trips_2014`
    -- Select non-null rows for no-copy conversion from Arrow to pandas.
    WHERE total_amount IS NOT NULL
    AND passenger_count IS NOT NULL
    AND trip_distance IS NOT NULL
    LIMIT 10000000
    """
    df = gbq.read_gbq(
        query,
        use_bqstorage_api=True,
        credentials=credentials,
        configuration={
            "query": {
                "destinationTable": destination,
                "writeDisposition": "WRITE_TRUNCATE",
            }
        },
    )
    assert len(df) == 10000000
955-
956-
957845class TestToGBQIntegration (object ):
958846 @pytest .fixture (autouse = True , scope = "function" )
959847 def setup (self , project , credentials , random_dataset_id ):
0 commit comments