Skip to content

Commit d64fb53

Browse files
authored
BigQuery add default location to client (googleapis#5678)
* Add read-only 'Client.location' property. Settable via new 'location' argument to ctor. * Use 'Client.location' as default for 'Client._get_query_results'. * Use 'Client.location' as default for 'Client.get_job'. * Use 'Client.location' as default for 'Client.cancel_job'. * Use 'Client.location' as default for 'Client.load_table_from_uri'. * Use 'Client.location' as default for 'Client.load_table_from_file'. * Use 'Client.location' as default for 'Client.load_table_from_dataframe'. * Use 'Client.location' as default for 'Client.copy_table'. * Use 'Client.location' as default for 'Client.extract_table'. * Use 'Client.location' as default for 'Client.query'. * Use 'Client.location' as default for 'create_dataset'. Closes googleapis#5148.
1 parent 2631c7f commit d64fb53

2 files changed

Lines changed: 578 additions & 77 deletions

File tree

bigquery/google/cloud/bigquery/client.py

Lines changed: 82 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,8 @@ class Client(ClientWithProject):
107107
current object.
108108
This parameter should be considered private, and could change in
109109
the future.
110+
location str:
111+
(Optional) Default location for jobs / datasets / tables.
110112
111113
Raises:
112114
google.auth.exceptions.DefaultCredentialsError:
@@ -118,10 +120,17 @@ class Client(ClientWithProject):
118120
'https://www.googleapis.com/auth/cloud-platform')
119121
"""The scopes required for authenticating as a BigQuery consumer."""
120122

121-
def __init__(self, project=None, credentials=None, _http=None):
123+
def __init__(
124+
self, project=None, credentials=None, _http=None, location=None):
122125
super(Client, self).__init__(
123126
project=project, credentials=credentials, _http=_http)
124127
self._connection = Connection(self)
128+
self._location = location
129+
130+
@property
131+
def location(self):
132+
"""Default location for jobs / datasets / tables."""
133+
return self._location
125134

126135
def get_service_account_email(self, project=None):
127136
"""Get the email address of the project's BigQuery service account
@@ -286,8 +295,14 @@ def create_dataset(self, dataset):
286295
287296
"""
288297
path = '/projects/%s/datasets' % (dataset.project,)
298+
299+
data = dataset.to_api_repr()
300+
if data.get('location') is None and self.location is not None:
301+
data['location'] = self.location
302+
289303
api_response = self._connection.api_request(
290-
method='POST', path=path, data=dataset.to_api_repr())
304+
method='POST', path=path, data=data)
305+
291306
return Dataset.from_api_repr(api_response)
292307

293308
def create_table(self, table):
@@ -548,6 +563,9 @@ def _get_query_results(
548563
if timeout_ms is not None:
549564
extra_params['timeoutMs'] = timeout_ms
550565

566+
if location is None:
567+
location = self.location
568+
551569
if location is not None:
552570
extra_params['location'] = location
553571

@@ -613,6 +631,10 @@ def get_job(
613631

614632
if project is None:
615633
project = self.project
634+
635+
if location is None:
636+
location = self.location
637+
616638
if location is not None:
617639
extra_params['location'] = location
618640

@@ -652,6 +674,10 @@ def cancel_job(
652674

653675
if project is None:
654676
project = self.project
677+
678+
if location is None:
679+
location = self.location
680+
655681
if location is not None:
656682
extra_params['location'] = location
657683

@@ -737,8 +763,12 @@ def list_jobs(
737763
extra_params=extra_params)
738764

739765
def load_table_from_uri(
740-
self, source_uris, destination, job_id=None, job_id_prefix=None,
741-
location=None, project=None, job_config=None,
766+
self, source_uris, destination,
767+
job_id=None,
768+
job_id_prefix=None,
769+
location=None,
770+
project=None,
771+
job_config=None,
742772
retry=DEFAULT_RETRY):
743773
"""Starts a job for loading data into a table from CloudStorage.
744774
@@ -773,14 +803,22 @@ def load_table_from_uri(
773803
google.cloud.bigquery.job.LoadJob: A new load job.
774804
"""
775805
job_id = _make_job_id(job_id, job_id_prefix)
806+
776807
if project is None:
777808
project = self.project
809+
810+
if location is None:
811+
location = self.location
812+
778813
job_ref = job._JobReference(job_id, project=project, location=location)
814+
779815
if isinstance(source_uris, six.string_types):
780816
source_uris = [source_uris]
817+
781818
load_job = job.LoadJob(
782819
job_ref, source_uris, destination, self, job_config)
783820
load_job._begin(retry=retry)
821+
784822
return load_job
785823

786824
def load_table_from_file(
@@ -831,14 +869,22 @@ def load_table_from_file(
831869
mode.
832870
"""
833871
job_id = _make_job_id(job_id, job_id_prefix)
872+
834873
if project is None:
835874
project = self.project
875+
876+
if location is None:
877+
location = self.location
878+
836879
job_ref = job._JobReference(job_id, project=project, location=location)
837880
load_job = job.LoadJob(job_ref, None, destination, self, job_config)
838881
job_resource = load_job._build_resource()
882+
839883
if rewind:
840884
file_obj.seek(0, os.SEEK_SET)
885+
841886
_check_mode(file_obj)
887+
842888
try:
843889
if size is None or size >= _MAX_MULTIPART_SIZE:
844890
response = self._do_resumable_upload(
@@ -848,6 +894,7 @@ def load_table_from_file(
848894
file_obj, job_resource, size, num_retries)
849895
except resumable_media.InvalidResponse as exc:
850896
raise exceptions.from_http_response(exc.response)
897+
851898
return self.job_from_resource(response.json())
852899

853900
def load_table_from_dataframe(self, dataframe, destination,
@@ -901,10 +948,19 @@ def load_table_from_dataframe(self, dataframe, destination,
901948
job_config = job.LoadJobConfig()
902949
job_config.source_format = job.SourceFormat.PARQUET
903950

951+
if location is None:
952+
location = self.location
953+
904954
return self.load_table_from_file(
905-
buffer, destination, num_retries=num_retries, rewind=True,
906-
job_id=job_id, job_id_prefix=job_id_prefix, location=location,
907-
project=project, job_config=job_config)
955+
buffer, destination,
956+
num_retries=num_retries,
957+
rewind=True,
958+
job_id=job_id,
959+
job_id_prefix=job_id_prefix,
960+
location=location,
961+
project=project,
962+
job_config=job_config,
963+
)
908964

909965
def _do_resumable_upload(self, stream, metadata, num_retries):
910966
"""Perform a resumable upload.
@@ -1050,16 +1106,23 @@ def copy_table(
10501106
google.cloud.bigquery.job.CopyJob: A new copy job instance.
10511107
"""
10521108
job_id = _make_job_id(job_id, job_id_prefix)
1109+
10531110
if project is None:
10541111
project = self.project
1112+
1113+
if location is None:
1114+
location = self.location
1115+
10551116
job_ref = job._JobReference(job_id, project=project, location=location)
10561117

10571118
if not isinstance(sources, collections.Sequence):
10581119
sources = [sources]
1120+
10591121
copy_job = job.CopyJob(
10601122
job_ref, sources, destination, client=self,
10611123
job_config=job_config)
10621124
copy_job._begin(retry=retry)
1125+
10631126
return copy_job
10641127

10651128
def extract_table(
@@ -1103,8 +1166,13 @@ def extract_table(
11031166
google.cloud.bigquery.job.ExtractJob: A new extract job instance.
11041167
"""
11051168
job_id = _make_job_id(job_id, job_id_prefix)
1169+
11061170
if project is None:
11071171
project = self.project
1172+
1173+
if location is None:
1174+
location = self.location
1175+
11081176
job_ref = job._JobReference(job_id, project=project, location=location)
11091177

11101178
if isinstance(destination_uris, six.string_types):
@@ -1114,6 +1182,7 @@ def extract_table(
11141182
job_ref, source, destination_uris, client=self,
11151183
job_config=job_config)
11161184
extract_job._begin(retry=retry)
1185+
11171186
return extract_job
11181187

11191188
def query(
@@ -1149,12 +1218,18 @@ def query(
11491218
google.cloud.bigquery.job.QueryJob: A new query job instance.
11501219
"""
11511220
job_id = _make_job_id(job_id, job_id_prefix)
1221+
11521222
if project is None:
11531223
project = self.project
1224+
1225+
if location is None:
1226+
location = self.location
1227+
11541228
job_ref = job._JobReference(job_id, project=project, location=location)
11551229
query_job = job.QueryJob(
11561230
job_ref, query, client=self, job_config=job_config)
11571231
query_job._begin(retry=retry)
1232+
11581233
return query_job
11591234

11601235
def insert_rows(self, table, rows, selected_fields=None, **kwargs):

0 commit comments

Comments
 (0)