Skip to content

Commit b06ab3f

Browse files
tseaver authored and tswast committed
Rename job classes (#3797)
* Rename class: 'jobs.LoadTableFromStorageJob' -> 'jobs.LoadJob'.
* Rename class: 'jobs.ExtractTableToStorageJob' -> 'jobs.ExtractJob'.
1 parent cd3a05d commit b06ab3f

5 files changed

Lines changed: 46 additions & 46 deletions

File tree

bigquery/google/cloud/bigquery/client.py

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@
1919
from google.cloud.bigquery._http import Connection
2020
from google.cloud.bigquery.dataset import Dataset
2121
from google.cloud.bigquery.job import CopyJob
22-
from google.cloud.bigquery.job import ExtractTableToStorageJob
23-
from google.cloud.bigquery.job import LoadTableFromStorageJob
22+
from google.cloud.bigquery.job import ExtractJob
23+
from google.cloud.bigquery.job import LoadJob
2424
from google.cloud.bigquery.job import QueryJob
2525
from google.cloud.bigquery.query import QueryResults
2626

@@ -169,20 +169,20 @@ def job_from_resource(self, resource):
169169
:param resource: one job resource from API response
170170
171171
:rtype: One of:
172-
:class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
172+
:class:`google.cloud.bigquery.job.LoadJob`,
173173
:class:`google.cloud.bigquery.job.CopyJob`,
174-
:class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
174+
:class:`google.cloud.bigquery.job.ExtractJob`,
175175
:class:`google.cloud.bigquery.job.QueryJob`,
176176
:class:`google.cloud.bigquery.job.RunSyncQueryJob`
177177
:returns: the job instance, constructed via the resource
178178
"""
179179
config = resource['configuration']
180180
if 'load' in config:
181-
return LoadTableFromStorageJob.from_api_repr(resource, self)
181+
return LoadJob.from_api_repr(resource, self)
182182
elif 'copy' in config:
183183
return CopyJob.from_api_repr(resource, self)
184184
elif 'extract' in config:
185-
return ExtractTableToStorageJob.from_api_repr(resource, self)
185+
return ExtractJob.from_api_repr(resource, self)
186186
elif 'query' in config:
187187
return QueryJob.from_api_repr(resource, self)
188188
raise ValueError('Cannot parse job resource')
@@ -253,11 +253,10 @@ def load_table_from_storage(self, job_name, destination, *source_uris):
253253
:param source_uris: URIs of data files to be loaded; in format
254254
``gs://<bucket_name>/<object_name_or_glob>``.
255255
256-
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
257-
:returns: a new ``LoadTableFromStorageJob`` instance
256+
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
257+
:returns: a new ``LoadJob`` instance
258258
"""
259-
return LoadTableFromStorageJob(job_name, destination, source_uris,
260-
client=self)
259+
return LoadJob(job_name, destination, source_uris, client=self)
261260

262261
def copy_table(self, job_name, destination, *sources):
263262
"""Construct a job for copying one or more tables into another table.
@@ -296,11 +295,10 @@ def extract_table_to_storage(self, job_name, source, *destination_uris):
296295
table data is to be extracted; in format
297296
``gs://<bucket_name>/<object_name_or_glob>``.
298297
299-
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
300-
:returns: a new ``ExtractTableToStorageJob`` instance
298+
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
299+
:returns: a new ``ExtractJob`` instance
301300
"""
302-
return ExtractTableToStorageJob(job_name, source, destination_uris,
303-
client=self)
301+
return ExtractJob(job_name, source, destination_uris, client=self)
304302

305303
def run_async_query(self, job_name, query,
306304
udf_resources=(), query_parameters=()):

bigquery/google/cloud/bigquery/job.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -525,8 +525,8 @@ class _LoadConfiguration(object):
525525
_write_disposition = None
526526

527527

528-
class LoadTableFromStorageJob(_AsyncJob):
529-
"""Asynchronous job for loading data into a table from CloudStorage.
528+
class LoadJob(_AsyncJob):
529+
"""Asynchronous job for loading data into a table from remote URI.
530530
531531
:type name: str
532532
:param name: the name of the job
@@ -535,8 +535,10 @@ class LoadTableFromStorageJob(_AsyncJob):
535535
:param destination: Table into which data is to be loaded.
536536
537537
:type source_uris: sequence of string
538-
:param source_uris: URIs of one or more data files to be loaded, in
539-
format ``gs://<bucket_name>/<object_name_or_glob>``.
538+
:param source_uris:
539+
URIs of one or more data files to be loaded. See
540+
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris
541+
for supported URI formats.
540542
541543
:type client: :class:`google.cloud.bigquery.client.Client`
542544
:param client: A client which holds credentials and project configuration
@@ -550,7 +552,7 @@ class LoadTableFromStorageJob(_AsyncJob):
550552
_JOB_TYPE = 'load'
551553

552554
def __init__(self, name, destination, source_uris, client, schema=()):
553-
super(LoadTableFromStorageJob, self).__init__(name, client)
555+
super(LoadJob, self).__init__(name, client)
554556
self.destination = destination
555557
self.source_uris = source_uris
556558
self._configuration = _LoadConfiguration()
@@ -775,7 +777,7 @@ def from_api_repr(cls, resource, client):
775777
:param client: Client which holds credentials and project
776778
configuration for the dataset.
777779
778-
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
780+
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
779781
:returns: Job parsed from ``resource``.
780782
"""
781783
name, config = cls._get_resource_config(resource)
@@ -919,7 +921,7 @@ class _ExtractConfiguration(object):
919921
_print_header = None
920922

921923

922-
class ExtractTableToStorageJob(_AsyncJob):
924+
class ExtractJob(_AsyncJob):
923925
"""Asynchronous job: extract data from a table into Cloud Storage.
924926
925927
:type name: str
@@ -940,7 +942,7 @@ class ExtractTableToStorageJob(_AsyncJob):
940942
_JOB_TYPE = 'extract'
941943

942944
def __init__(self, name, source, destination_uris, client):
943-
super(ExtractTableToStorageJob, self).__init__(name, client)
945+
super(ExtractJob, self).__init__(name, client)
944946
self.source = source
945947
self.destination_uris = destination_uris
946948
self._configuration = _ExtractConfiguration()
@@ -1018,7 +1020,7 @@ def from_api_repr(cls, resource, client):
10181020
:param client: Client which holds credentials and project
10191021
configuration for the dataset.
10201022
1021-
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
1023+
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
10221024
:returns: Job parsed from ``resource``.
10231025
"""
10241026
name, config = cls._get_resource_config(resource)

bigquery/google/cloud/bigquery/table.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1120,7 +1120,7 @@ def upload_from_file(self,
11201120
:type null_marker: str
11211121
:param null_marker: Optional. A custom null marker (example: "\\N")
11221122
1123-
:rtype: :class:`~google.cloud.bigquery.jobs.LoadTableFromStorageJob`
1123+
:rtype: :class:`~google.cloud.bigquery.jobs.LoadJob`
11241124
11251125
:returns: the job instance used to load the data (e.g., for
11261126
querying status). Note that the job is already started:

bigquery/tests/unit/test_client.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -210,9 +210,9 @@ def test_job_from_resource_unknown_type(self):
210210

211211
def test_list_jobs_defaults(self):
212212
import six
213-
from google.cloud.bigquery.job import LoadTableFromStorageJob
213+
from google.cloud.bigquery.job import LoadJob
214214
from google.cloud.bigquery.job import CopyJob
215-
from google.cloud.bigquery.job import ExtractTableToStorageJob
215+
from google.cloud.bigquery.job import ExtractJob
216216
from google.cloud.bigquery.job import QueryJob
217217

218218
PROJECT = 'PROJECT'
@@ -223,9 +223,9 @@ def test_list_jobs_defaults(self):
223223
SOURCE_URI = 'gs://test_bucket/src_object*'
224224
DESTINATION_URI = 'gs://test_bucket/dst_object*'
225225
JOB_TYPES = {
226-
'load_job': LoadTableFromStorageJob,
226+
'load_job': LoadJob,
227227
'copy_job': CopyJob,
228-
'extract_job': ExtractTableToStorageJob,
228+
'extract_job': ExtractJob,
229229
'query_job': QueryJob,
230230
}
231231
PATH = 'projects/%s/jobs' % PROJECT
@@ -342,13 +342,13 @@ def test_list_jobs_defaults(self):
342342

343343
def test_list_jobs_load_job_wo_sourceUris(self):
344344
import six
345-
from google.cloud.bigquery.job import LoadTableFromStorageJob
345+
from google.cloud.bigquery.job import LoadJob
346346

347347
PROJECT = 'PROJECT'
348348
DATASET = 'test_dataset'
349349
SOURCE_TABLE = 'source_table'
350350
JOB_TYPES = {
351-
'load_job': LoadTableFromStorageJob,
351+
'load_job': LoadJob,
352352
}
353353
PATH = 'projects/%s/jobs' % PROJECT
354354
TOKEN = 'TOKEN'
@@ -429,7 +429,7 @@ def test_list_jobs_explicit_missing(self):
429429
'stateFilter': 'done'})
430430

431431
def test_load_table_from_storage(self):
432-
from google.cloud.bigquery.job import LoadTableFromStorageJob
432+
from google.cloud.bigquery.job import LoadJob
433433

434434
PROJECT = 'PROJECT'
435435
JOB = 'job_name'
@@ -442,7 +442,7 @@ def test_load_table_from_storage(self):
442442
dataset = client.dataset(DATASET)
443443
destination = dataset.table(DESTINATION)
444444
job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
445-
self.assertIsInstance(job, LoadTableFromStorageJob)
445+
self.assertIsInstance(job, LoadJob)
446446
self.assertIs(job._client, client)
447447
self.assertEqual(job.name, JOB)
448448
self.assertEqual(list(job.source_uris), [SOURCE_URI])
@@ -470,7 +470,7 @@ def test_copy_table(self):
470470
self.assertIs(job.destination, destination)
471471

472472
def test_extract_table_to_storage(self):
473-
from google.cloud.bigquery.job import ExtractTableToStorageJob
473+
from google.cloud.bigquery.job import ExtractJob
474474

475475
PROJECT = 'PROJECT'
476476
JOB = 'job_name'
@@ -483,7 +483,7 @@ def test_extract_table_to_storage(self):
483483
dataset = client.dataset(DATASET)
484484
source = dataset.table(SOURCE)
485485
job = client.extract_table_to_storage(JOB, source, DESTINATION)
486-
self.assertIsInstance(job, ExtractTableToStorageJob)
486+
self.assertIsInstance(job, ExtractJob)
487487
self.assertIs(job._client, client)
488488
self.assertEqual(job.name, JOB)
489489
self.assertEqual(job.source, source)

bigquery/tests/unit/test_job.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -143,24 +143,24 @@ def _verifyReadonlyResourceProperties(self, job, resource):
143143
self.assertIsNone(job.user_email)
144144

145145

146-
class TestLoadTableFromStorageJob(unittest.TestCase, _Base):
146+
class TestLoadJob(unittest.TestCase, _Base):
147147
JOB_TYPE = 'load'
148148

149149
@staticmethod
150150
def _get_target_class():
151-
from google.cloud.bigquery.job import LoadTableFromStorageJob
151+
from google.cloud.bigquery.job import LoadJob
152152

153-
return LoadTableFromStorageJob
153+
return LoadJob
154154

155155
def _setUpConstants(self):
156-
super(TestLoadTableFromStorageJob, self)._setUpConstants()
156+
super(TestLoadJob, self)._setUpConstants()
157157
self.INPUT_FILES = 2
158158
self.INPUT_BYTES = 12345
159159
self.OUTPUT_BYTES = 23456
160160
self.OUTPUT_ROWS = 345
161161

162162
def _makeResource(self, started=False, ended=False):
163-
resource = super(TestLoadTableFromStorageJob, self)._makeResource(
163+
resource = super(TestLoadJob, self)._makeResource(
164164
started, ended)
165165
config = resource['configuration']['load']
166166
config['sourceUris'] = [self.SOURCE1]
@@ -1110,19 +1110,19 @@ def test_reload_w_alternate_client(self):
11101110
self._verifyResourceProperties(job, RESOURCE)
11111111

11121112

1113-
class TestExtractTableToStorageJob(unittest.TestCase, _Base):
1113+
class TestExtractJob(unittest.TestCase, _Base):
11141114
JOB_TYPE = 'extract'
11151115
SOURCE_TABLE = 'source_table'
11161116
DESTINATION_URI = 'gs://bucket_name/object_name'
11171117

11181118
@staticmethod
11191119
def _get_target_class():
1120-
from google.cloud.bigquery.job import ExtractTableToStorageJob
1120+
from google.cloud.bigquery.job import ExtractJob
11211121

1122-
return ExtractTableToStorageJob
1122+
return ExtractJob
11231123

11241124
def _makeResource(self, started=False, ended=False):
1125-
resource = super(TestExtractTableToStorageJob, self)._makeResource(
1125+
resource = super(TestExtractJob, self)._makeResource(
11261126
started, ended)
11271127
config = resource['configuration']['extract']
11281128
config['sourceTable'] = {
@@ -2098,15 +2098,15 @@ def __init__(self, name=None):
20982098
def name(self):
20992099
if self._name is not None:
21002100
return self._name
2101-
return TestLoadTableFromStorageJob.TABLE_NAME
2101+
return TestLoadJob.TABLE_NAME
21022102

21032103
@property
21042104
def project(self):
2105-
return TestLoadTableFromStorageJob.PROJECT
2105+
return TestLoadJob.PROJECT
21062106

21072107
@property
21082108
def dataset_name(self):
2109-
return TestLoadTableFromStorageJob.DS_NAME
2109+
return TestLoadJob.DS_NAME
21102110

21112111

21122112
class _Connection(object):

0 commit comments

Comments (0)