Skip to content

Commit 04d1649

Browse files
authored
Prune systests identified as redundant to snippets. (googleapis#5365)
See @tswast's analysis: googleapis#5003 (comment) Toward googleapis#5003.
1 parent e592ca3 commit 04d1649

1 file changed

Lines changed: 0 additions & 98 deletions

File tree

bigquery/tests/system.py

Lines changed: 0 additions & 98 deletions
Original file line numberDiff line numberDiff line change
@@ -475,47 +475,6 @@ def test_insert_rows_then_dump_table(self):
475475
self.assertEqual(sorted(row_tuples, key=by_age),
476476
sorted(ROWS, key=by_age))
477477

478-
def test_load_table_from_local_file_then_dump_table(self):
479-
from google.cloud._testing import _NamedTemporaryFile
480-
from google.cloud.bigquery.job import CreateDisposition
481-
from google.cloud.bigquery.job import SourceFormat
482-
from google.cloud.bigquery.job import WriteDisposition
483-
484-
TABLE_NAME = 'test_table'
485-
486-
dataset = self.temp_dataset(_make_dataset_id('load_local_then_dump'))
487-
table_ref = dataset.table(TABLE_NAME)
488-
table_arg = Table(table_ref, schema=SCHEMA)
489-
table = retry_403(Config.CLIENT.create_table)(table_arg)
490-
self.to_delete.insert(0, table)
491-
492-
with _NamedTemporaryFile() as temp:
493-
with open(temp.name, 'w') as csv_write:
494-
writer = csv.writer(csv_write)
495-
writer.writerow(HEADER_ROW)
496-
writer.writerows(ROWS)
497-
498-
with open(temp.name, 'rb') as csv_read:
499-
config = bigquery.LoadJobConfig()
500-
config.source_format = SourceFormat.CSV
501-
config.skip_leading_rows = 1
502-
config.create_disposition = CreateDisposition.CREATE_NEVER
503-
config.write_disposition = WriteDisposition.WRITE_EMPTY
504-
config.schema = table.schema
505-
job = Config.CLIENT.load_table_from_file(
506-
csv_read, table_ref, job_config=config)
507-
508-
# Retry until done.
509-
job.result(timeout=JOB_TIMEOUT)
510-
511-
self.assertEqual(job.output_rows, len(ROWS))
512-
513-
rows = self._fetch_single_page(table)
514-
row_tuples = [r.values() for r in rows]
515-
by_age = operator.itemgetter(1)
516-
self.assertEqual(sorted(row_tuples, key=by_age),
517-
sorted(ROWS, key=by_age))
518-
519478
def test_load_table_from_local_avro_file_then_dump_table(self):
520479
from google.cloud.bigquery.job import SourceFormat
521480
from google.cloud.bigquery.job import WriteDisposition
@@ -817,37 +776,6 @@ def test_extract_table(self):
817776
got = destination.download_as_string().decode('utf-8')
818777
self.assertIn('Bharney Rhubble', got)
819778

820-
def test_extract_table_w_job_config(self):
821-
from google.cloud.storage import Client as StorageClient
822-
from google.cloud.bigquery.job import DestinationFormat
823-
824-
storage_client = StorageClient()
825-
local_id = unique_resource_id()
826-
bucket_name = 'bq_extract_test' + local_id
827-
blob_name = 'person_ages.csv'
828-
dataset_id = _make_dataset_id('load_gcs_then_extract')
829-
table_id = 'test_table'
830-
table_ref = Config.CLIENT.dataset(dataset_id).table(table_id)
831-
table = Table(table_ref)
832-
self.to_delete.insert(0, table)
833-
self._load_table_for_extract_table(
834-
storage_client, ROWS, bucket_name, blob_name, table_ref)
835-
bucket = storage_client.bucket(bucket_name)
836-
destination_blob_name = 'person_ages_out.csv'
837-
destination = bucket.blob(destination_blob_name)
838-
destination_uri = 'gs://{}/person_ages_out.csv'.format(bucket_name)
839-
840-
config = bigquery.ExtractJobConfig()
841-
config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON
842-
job = Config.CLIENT.extract_table(
843-
table, destination_uri, job_config=config)
844-
job.result()
845-
846-
self.to_delete.insert(0, destination)
847-
got = destination.download_as_string().decode('utf-8')
848-
self.assertIn('"Bharney Rhubble"', got)
849-
self.assertEqual(job.destination_uri_file_counts, [1])
850-
851779
def test_copy_table(self):
852780
# If we create a new table to copy from, the test won't work
853781
# because the new rows will be stored in the streaming buffer,
@@ -1538,32 +1466,6 @@ def test_dbapi_w_query_parameters(self):
15381466
row = Config.CURSOR.fetchone()
15391467
self.assertIsNone(row, msg=msg)
15401468

1541-
def test_dump_table_w_public_data(self):
1542-
PUBLIC = 'bigquery-public-data'
1543-
DATASET_ID = 'samples'
1544-
TABLE_NAME = 'natality'
1545-
1546-
table_ref = DatasetReference(PUBLIC, DATASET_ID).table(TABLE_NAME)
1547-
table = Config.CLIENT.get_table(table_ref)
1548-
self._fetch_single_page(table)
1549-
1550-
def test_dump_table_w_public_data_selected_fields(self):
1551-
PUBLIC = 'bigquery-public-data'
1552-
DATASET_ID = 'samples'
1553-
TABLE_NAME = 'natality'
1554-
selected_fields = [
1555-
bigquery.SchemaField('year', 'INTEGER', mode='NULLABLE'),
1556-
bigquery.SchemaField('month', 'INTEGER', mode='NULLABLE'),
1557-
bigquery.SchemaField('day', 'INTEGER', mode='NULLABLE'),
1558-
]
1559-
table_ref = DatasetReference(PUBLIC, DATASET_ID).table(TABLE_NAME)
1560-
1561-
rows = self._fetch_single_page(
1562-
table_ref, selected_fields=selected_fields)
1563-
1564-
self.assertGreater(len(rows), 0)
1565-
self.assertEqual(len(rows[0]), 3)
1566-
15671469
def test_large_query_w_public_data(self):
15681470
PUBLIC = 'bigquery-public-data'
15691471
DATASET_ID = 'samples'

0 commit comments

Comments (0)