Commit fbe942b

chemelnucfin authored and Jon Wayne Parrott committed
Use constant strings for job properties in tests (googleapis#4833)
1 parent 1befde4 · commit fbe942b

3 files changed: 119 additions & 68 deletions
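
Note: the two test files reproduced below account for 73 of the additions and 39 of the deletions; the remaining changes live in a third file this view does not show. Every constant the tests switch to (CreateDisposition, WriteDisposition, SourceFormat, DestinationFormat) is imported from google.cloud.bigquery.job, where each is a plain holder class whose attributes are the API's wire-format strings. A minimal sketch of that pattern follows; the class and attribute names match the diff, but treating these members as the complete set is an assumption:

# Sketch of the constant-holder pattern in google/cloud/bigquery/job.py.
# Only names that appear in the diff below are guaranteed; the rest of
# each class's membership is not shown in this commit view.
class CreateDisposition(object):
    """Whether a job may create its destination table."""
    CREATE_IF_NEEDED = 'CREATE_IF_NEEDED'
    CREATE_NEVER = 'CREATE_NEVER'


class WriteDisposition(object):
    """How a job treats existing data in its destination table."""
    WRITE_APPEND = 'WRITE_APPEND'
    WRITE_TRUNCATE = 'WRITE_TRUNCATE'
    WRITE_EMPTY = 'WRITE_EMPTY'


class SourceFormat(object):
    """Serialization formats accepted by load jobs."""
    CSV = 'CSV'
    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
    AVRO = 'AVRO'


class DestinationFormat(object):
    """Serialization formats produced by extract jobs."""
    CSV = 'CSV'
    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
    AVRO = 'AVRO'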

bigquery/tests/system.py

Lines changed: 38 additions & 19 deletions
@@ -394,6 +394,9 @@ def test_insert_rows_then_dump_table(self):

     def test_load_table_from_local_file_then_dump_table(self):
         from google.cloud._testing import _NamedTemporaryFile
+        from google.cloud.bigquery.job import CreateDisposition
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition

         TABLE_NAME = 'test_table'

@@ -411,10 +414,10 @@ def test_load_table_from_local_file_then_dump_table(self):

         with open(temp.name, 'rb') as csv_read:
             config = bigquery.LoadJobConfig()
-            config.source_format = 'CSV'
+            config.source_format = SourceFormat.CSV
             config.skip_leading_rows = 1
-            config.create_disposition = 'CREATE_NEVER'
-            config.write_disposition = 'WRITE_EMPTY'
+            config.create_disposition = CreateDisposition.CREATE_NEVER
+            config.write_disposition = WriteDisposition.WRITE_EMPTY
             config.schema = table.schema
             job = Config.CLIENT.load_table_from_file(
                 csv_read, table_ref, job_config=config)
@@ -431,6 +434,8 @@ def test_load_table_from_local_file_then_dump_table(self):
             sorted(ROWS, key=by_age))

     def test_load_table_from_local_avro_file_then_dump_table(self):
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition
         TABLE_NAME = 'test_table_avro'
         ROWS = [
             ("violet", 400),
@@ -448,8 +453,8 @@ def test_load_table_from_local_avro_file_then_dump_table(self):

         with open(os.path.join(WHERE, 'data', 'colors.avro'), 'rb') as avrof:
             config = bigquery.LoadJobConfig()
-            config.source_format = 'AVRO'
-            config.write_disposition = 'WRITE_TRUNCATE'
+            config.source_format = SourceFormat.AVRO
+            config.write_disposition = WriteDisposition.WRITE_TRUNCATE
             job = Config.CLIENT.load_table_from_file(
                 avrof, table_ref, job_config=config)
             # Retry until done.
@@ -465,6 +470,9 @@ def test_load_table_from_local_avro_file_then_dump_table(self):
             sorted(ROWS, key=by_wavelength))

     def test_load_avro_from_uri_then_dump_table(self):
+        from google.cloud.bigquery.job import CreateDisposition
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition
         table_name = 'test_table'
         rows = [
             ("violet", 400),
@@ -485,9 +493,9 @@ def test_load_avro_from_uri_then_dump_table(self):
         self.to_delete.insert(0, table)

         config = bigquery.LoadJobConfig()
-        config.create_disposition = 'CREATE_NEVER'
-        config.source_format = 'AVRO'
-        config.write_disposition = 'WRITE_EMPTY'
+        config.create_disposition = CreateDisposition.CREATE_NEVER
+        config.source_format = SourceFormat.AVRO
+        config.write_disposition = WriteDisposition.WRITE_EMPTY
         job = Config.CLIENT.load_table_from_uri(
             GS_URL, table_arg, job_config=config)
         job.result(timeout=JOB_TIMEOUT)
@@ -500,6 +508,10 @@ def test_load_avro_from_uri_then_dump_table(self):
             sorted(rows, key=lambda x: x[1]))

     def test_load_table_from_uri_then_dump_table(self):
+        from google.cloud.bigquery.job import CreateDisposition
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition
+
         TABLE_ID = 'test_table'
         GS_URL = self._write_csv_to_storage(
             'bq_load_test' + unique_resource_id(), 'person_ages.csv',
@@ -512,10 +524,10 @@ def test_load_table_from_uri_then_dump_table(self):
         self.to_delete.insert(0, table)

         config = bigquery.LoadJobConfig()
-        config.create_disposition = 'CREATE_NEVER'
+        config.create_disposition = CreateDisposition.CREATE_NEVER
         config.skip_leading_rows = 1
-        config.source_format = 'CSV'
-        config.write_disposition = 'WRITE_EMPTY'
+        config.source_format = SourceFormat.CSV
+        config.write_disposition = WriteDisposition.WRITE_EMPTY
         job = Config.CLIENT.load_table_from_uri(
             GS_URL, dataset.table(TABLE_ID), job_config=config)

@@ -674,6 +686,7 @@ def test_extract_table(self):

     def test_extract_table_w_job_config(self):
         from google.cloud.storage import Client as StorageClient
+        from google.cloud.bigquery.job import DestinationFormat

         storage_client = StorageClient()
         local_id = unique_resource_id()
@@ -691,10 +704,10 @@ def test_extract_table_w_job_config(self):
         destination = bucket.blob(destination_blob_name)
         destination_uri = 'gs://{}/person_ages_out.csv'.format(bucket_name)

-        job_config = bigquery.ExtractJobConfig()
-        job_config.destination_format = 'NEWLINE_DELIMITED_JSON'
+        config = bigquery.ExtractJobConfig()
+        config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON
         job = Config.CLIENT.extract_table(
-            table, destination_uri, job_config=job_config)
+            table, destination_uri, job_config=config)
         job.result()

         self.to_delete.insert(0, destination)
@@ -947,6 +960,9 @@ def test_dbapi_fetchall(self):

     def _load_table_for_dml(self, rows, dataset_id, table_id):
         from google.cloud._testing import _NamedTemporaryFile
+        from google.cloud.bigquery.job import CreateDisposition
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition

         dataset = self.temp_dataset(dataset_id)
         greeting = bigquery.SchemaField(
@@ -964,10 +980,10 @@ def _load_table_for_dml(self, rows, dataset_id, table_id):

         with open(temp.name, 'rb') as csv_read:
             config = bigquery.LoadJobConfig()
-            config.source_format = 'CSV'
+            config.source_format = SourceFormat.CSV
             config.skip_leading_rows = 1
-            config.create_disposition = 'CREATE_NEVER'
-            config.write_disposition = 'WRITE_EMPTY'
+            config.create_disposition = CreateDisposition.CREATE_NEVER
+            config.write_disposition = WriteDisposition.WRITE_EMPTY
             job = Config.CLIENT.load_table_from_file(
                 csv_read, table_ref, job_config=config)

@@ -1540,6 +1556,9 @@ def _fetch_dataframe(self, query):

     @unittest.skipIf(pandas is None, 'Requires `pandas`')
     def test_nested_table_to_dataframe(self):
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.job import WriteDisposition
+
         SF = bigquery.SchemaField
         schema = [
             SF('string_col', 'STRING', mode='NULLABLE'),
@@ -1566,8 +1585,8 @@ def test_nested_table_to_dataframe(self):
         table = dataset.table(table_id)
         self.to_delete.insert(0, table)
         job_config = bigquery.LoadJobConfig()
-        job_config.write_disposition = 'WRITE_TRUNCATE'
-        job_config.source_format = 'NEWLINE_DELIMITED_JSON'
+        job_config.write_disposition = WriteDisposition.WRITE_TRUNCATE
+        job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON
         job_config.schema = schema
         # Load a table using a local JSON file from memory.
         Config.CLIENT.load_table_from_file(
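
Because each constant is just the raw string the REST API expects, swapping them in changes nothing about the requests these system tests issue. A quick sanity check, assuming the class definitions sketched above:

from google.cloud.bigquery.job import SourceFormat, WriteDisposition

# The constants compare equal to the literals they replace, so any
# payload built from them is byte-for-byte identical to before.
assert SourceFormat.CSV == 'CSV'
assert WriteDisposition.WRITE_TRUNCATE == 'WRITE_TRUNCATE'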

bigquery/tests/unit/test_client.py

Lines changed: 35 additions & 20 deletions
@@ -606,8 +606,9 @@ def test_create_table_w_schema_and_query(self):
         self.assertEqual(got.view_query, query)

     def test_create_table_w_external(self):
-        from google.cloud.bigquery.table import Table
         from google.cloud.bigquery.external_config import ExternalConfig
+        from google.cloud.bigquery.job import SourceFormat
+        from google.cloud.bigquery.table import Table

         path = 'projects/%s/datasets/%s/tables' % (
             self.PROJECT, self.DS_ID)
@@ -621,7 +622,7 @@ def test_create_table_w_external(self):
                 'tableId': self.TABLE_ID
             },
             'externalDataConfiguration': {
-                'sourceFormat': 'CSV',
+                'sourceFormat': SourceFormat.CSV,
                 'autodetect': True,
             },
         }
@@ -644,7 +645,7 @@ def test_create_table_w_external(self):
                 'tableId': self.TABLE_ID,
             },
             'externalDataConfiguration': {
-                'sourceFormat': 'CSV',
+                'sourceFormat': SourceFormat.CSV,
                 'autodetect': True,
             },
             'labels': {},
@@ -653,7 +654,8 @@ def test_create_table_w_external(self):
         self.assertEqual(got.table_id, self.TABLE_ID)
         self.assertEqual(got.project, self.PROJECT)
         self.assertEqual(got.dataset_id, self.DS_ID)
-        self.assertEqual(got.external_data_configuration.source_format, 'CSV')
+        self.assertEqual(got.external_data_configuration.source_format,
+                         SourceFormat.CSV)
         self.assertEqual(got.external_data_configuration.autodetect, True)

     def test_get_table(self):
@@ -1228,7 +1230,9 @@ def test_get_job_miss_w_explict_project(self):
         self.assertEqual(req['query_params'], {'projection': 'full'})

     def test_get_job_hit(self):
+        from google.cloud.bigquery.job import CreateDisposition
         from google.cloud.bigquery.job import QueryJob
+        from google.cloud.bigquery.job import WriteDisposition

         JOB_ID = 'query_job'
         QUERY_DESTINATION_TABLE = 'query_destination_table'
@@ -1248,8 +1252,8 @@ def test_get_job_hit(self):
                         'datasetId': self.DS_ID,
                         'tableId': QUERY_DESTINATION_TABLE,
                     },
-                    'createDisposition': 'CREATE_IF_NEEDED',
-                    'writeDisposition': 'WRITE_TRUNCATE',
+                    'createDisposition': CreateDisposition.CREATE_IF_NEEDED,
+                    'writeDisposition': WriteDisposition.WRITE_TRUNCATE,
                 }
             },
         }
@@ -1261,8 +1265,10 @@ def test_get_job_hit(self):

         self.assertIsInstance(job, QueryJob)
         self.assertEqual(job.job_id, JOB_ID)
-        self.assertEqual(job.create_disposition, 'CREATE_IF_NEEDED')
-        self.assertEqual(job.write_disposition, 'WRITE_TRUNCATE')
+        self.assertEqual(job.create_disposition,
+                         CreateDisposition.CREATE_IF_NEEDED)
+        self.assertEqual(job.write_disposition,
+                         WriteDisposition.WRITE_TRUNCATE)

         self.assertEqual(len(conn._requested), 1)
         req = conn._requested[0]
@@ -1328,10 +1334,12 @@ def test_cancel_job_hit(self):
         self.assertEqual(req['query_params'], {'projection': 'full'})

     def test_list_jobs_defaults(self):
-        from google.cloud.bigquery.job import LoadJob
         from google.cloud.bigquery.job import CopyJob
+        from google.cloud.bigquery.job import CreateDisposition
         from google.cloud.bigquery.job import ExtractJob
+        from google.cloud.bigquery.job import LoadJob
         from google.cloud.bigquery.job import QueryJob
+        from google.cloud.bigquery.job import WriteDisposition

         SOURCE_TABLE = 'source_table'
         DESTINATION_TABLE = 'destination_table'
@@ -1362,8 +1370,8 @@ def test_list_jobs_defaults(self):
                         'datasetId': self.DS_ID,
                         'tableId': QUERY_DESTINATION_TABLE,
                     },
-                    'createDisposition': 'CREATE_IF_NEEDED',
-                    'writeDisposition': 'WRITE_TRUNCATE',
+                    'createDisposition': CreateDisposition.CREATE_IF_NEEDED,
+                    'writeDisposition': WriteDisposition.WRITE_TRUNCATE,
                 }
             },
         }
@@ -1608,7 +1616,9 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         from google.cloud.bigquery.client import _DEFAULT_CHUNKSIZE
         from google.cloud.bigquery.client import _GENERIC_CONTENT_TYPE
         from google.cloud.bigquery.client import _get_upload_headers
-        from google.cloud.bigquery.job import LoadJob, LoadJobConfig
+        from google.cloud.bigquery.job import LoadJob
+        from google.cloud.bigquery.job import LoadJobConfig
+        from google.cloud.bigquery.job import SourceFormat

         # Create mocks to be checked for doing transport.
         resumable_url = 'http://test.invalid?upload_id=hey-you'
@@ -1622,7 +1632,7 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         data = b'goodbye gudbi gootbee'
         stream = io.BytesIO(data)
         config = LoadJobConfig()
-        config.source_format = 'CSV'
+        config.source_format = SourceFormat.CSV
         job = LoadJob(None, None, self.TABLE_REF, client, job_config=config)
         metadata = job._build_resource()
         upload, transport = client._initiate_resumable_upload(
@@ -1675,7 +1685,9 @@ def test__initiate_resumable_upload_with_retry(self):
     def _do_multipart_upload_success_helper(
             self, get_boundary, num_retries=None):
         from google.cloud.bigquery.client import _get_upload_headers
-        from google.cloud.bigquery.job import LoadJob, LoadJobConfig
+        from google.cloud.bigquery.job import LoadJob
+        from google.cloud.bigquery.job import LoadJobConfig
+        from google.cloud.bigquery.job import SourceFormat

         fake_transport = self._mock_transport(http_client.OK, {})
         client = self._make_one(project=self.PROJECT, _http=fake_transport)
@@ -1685,7 +1697,7 @@ def _do_multipart_upload_success_helper(
         data = b'Bzzzz-zap \x00\x01\xf4'
         stream = io.BytesIO(data)
         config = LoadJobConfig()
-        config.source_format = 'CSV'
+        config.source_format = SourceFormat.CSV
         job = LoadJob(None, None, self.TABLE_REF, client, job_config=config)
         metadata = job._build_resource()
         size = len(data)
@@ -2838,7 +2850,7 @@ class TestClientUpload(object):
     # NOTE: This is a "partner" to `TestClient` meant to test some of the
     # "load_table_from_file" portions of `Client`. It also uses
     # `pytest`-style tests rather than `unittest`-style.
-
+    from google.cloud.bigquery.job import SourceFormat
     TABLE_REF = DatasetReference(
         'project_id', 'test_dataset').table('test_table')

@@ -2881,7 +2893,7 @@ def _make_do_upload_patch(cls, client, method,
         'jobReference': {'projectId': 'project_id', 'jobId': 'job_id'},
         'configuration': {
             'load': {
-                'sourceFormat': 'CSV',
+                'sourceFormat': SourceFormat.CSV,
                 'destinationTable': {
                     'projectId': 'project_id',
                     'datasetId': 'test_dataset',
@@ -2898,9 +2910,10 @@ def _make_file_obj():
     @staticmethod
     def _make_config():
         from google.cloud.bigquery.job import LoadJobConfig
+        from google.cloud.bigquery.job import SourceFormat

         config = LoadJobConfig()
-        config.source_format = 'CSV'
+        config.source_format = SourceFormat.CSV
         return config

     # High-level tests
@@ -2925,21 +2938,23 @@ def test_load_table_from_file_resumable(self):

     def test_load_table_from_file_resumable_metadata(self):
         from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
+        from google.cloud.bigquery.job import CreateDisposition
+        from google.cloud.bigquery.job import WriteDisposition

         client = self._make_client()
         file_obj = self._make_file_obj()

         config = self._make_config()
         config.allow_jagged_rows = False
         config.allow_quoted_newlines = False
-        config.create_disposition = 'CREATE_IF_NEEDED'
+        config.create_disposition = CreateDisposition.CREATE_IF_NEEDED
         config.encoding = 'utf8'
         config.field_delimiter = ','
         config.ignore_unknown_values = False
         config.max_bad_records = 0
         config.quote_character = '"'
         config.skip_leading_rows = 1
-        config.write_disposition = 'WRITE_APPEND'
+        config.write_disposition = WriteDisposition.WRITE_APPEND
         config.null_marker = r'\N'

         expected_config = {
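
The commit message does not spell out the motivation, but the usual argument for this kind of change is fail-fast typo detection: a misspelled string literal only surfaces when the backend rejects the job, while a misspelled attribute raises immediately. A small illustration, resting on the same assumed class definitions as the sketch above:

from google.cloud.bigquery.job import WriteDisposition

# A typo inside a string ('WRITE_TRUNCTE') would sail through the test
# code and fail only at the API; a typo'd attribute fails right here.
try:
    WriteDisposition.WRITE_TRUNCTE  # deliberately misspelled
except AttributeError:
    print('typo caught locally, before any request is built')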
