Commit cad353c

Move table and dataset snippets to samples/ directory (#7683)
* Move create table sample to samples directory. These samples follow the same pattern as the Models API samples, in line with our rubric guideline of one sample per file. This work was completed by Layla and reviewed on Tim's personal fork, as the Models API PR had not yet merged when she started this work.
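For reference, a rough sketch of what the one-sample-per-file layout described above might look like for the create-table snippet. This is only an illustration, not a file from this commit: the function name, signature, and return value are assumptions, while the API calls mirror the snippet removed from docs/snippets.py below.

from google.cloud import bigquery


def create_table(client, dataset_id):
    """Create a table with a two-column schema (illustrative sketch only)."""
    # The dataset is assumed to exist already.
    dataset_ref = client.dataset(dataset_id)

    schema = [
        bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
        bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"),
    ]
    table = bigquery.Table(dataset_ref.table("my_table"), schema=schema)
    table = client.create_table(table)  # API request
    return table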
1 parent e043faa commit cad353c

28 files changed: 729 additions & 351 deletions

docs/samples

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+../samples/

docs/snippets.py

Lines changed: 0 additions & 338 deletions
@@ -147,24 +147,6 @@ def test_create_client_json_credentials():
     assert client is not None


-def test_list_datasets(client):
-    """List datasets for a project."""
-    # [START bigquery_list_datasets]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-
-    datasets = list(client.list_datasets())
-    project = client.project
-
-    if datasets:
-        print("Datasets in project {}:".format(project))
-        for dataset in datasets: # API request(s)
-            print("\t{}".format(dataset.dataset_id))
-    else:
-        print("{} project does not contain any datasets.".format(project))
-    # [END bigquery_list_datasets]
-
-
 def test_list_datasets_by_label(client, to_delete):
     dataset_id = "list_datasets_by_label_{}".format(_millis())
     dataset = bigquery.Dataset(client.dataset(dataset_id))
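A minimal sketch of how the removed list-datasets snippet above could look as a standalone sample under the new layout (file and function names are assumptions; the calls are those from the deleted test):

def list_datasets(client):
    """Print the datasets in the client's project (illustrative sketch only)."""
    datasets = list(client.list_datasets())  # API request(s)
    project = client.project

    if datasets:
        print("Datasets in project {}:".format(project))
        for dataset in datasets:
            print("\t{}".format(dataset.dataset_id))
    else:
        print("{} project does not contain any datasets.".format(project))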
@@ -192,78 +174,6 @@ def test_list_datasets_by_label(client, to_delete):
     assert dataset_id in found


-def test_create_dataset(client, to_delete):
-    """Create a dataset."""
-    dataset_id = "create_dataset_{}".format(_millis())
-
-    # [START bigquery_create_dataset]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_id = 'my_dataset'
-
-    # Create a DatasetReference using a chosen dataset ID.
-    # The project defaults to the Client's project if not specified.
-    dataset_ref = client.dataset(dataset_id)
-
-    # Construct a full Dataset object to send to the API.
-    dataset = bigquery.Dataset(dataset_ref)
-    # Specify the geographic location where the dataset should reside.
-    dataset.location = "US"
-
-    # Send the dataset to the API for creation.
-    # Raises google.api_core.exceptions.Conflict if the Dataset already
-    # exists within the project.
-    dataset = client.create_dataset(dataset) # API request
-    # [END bigquery_create_dataset]
-
-    to_delete.append(dataset)
-
-
-def test_get_dataset_information(client, to_delete):
-    """View information about a dataset."""
-    dataset_id = "get_dataset_{}".format(_millis())
-    dataset_labels = {"color": "green"}
-    dataset_ref = client.dataset(dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    dataset.description = ORIGINAL_DESCRIPTION
-    dataset.labels = dataset_labels
-    dataset = client.create_dataset(dataset) # API request
-    to_delete.append(dataset)
-
-    # [START bigquery_get_dataset]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_id = 'my_dataset'
-
-    dataset_ref = client.dataset(dataset_id)
-    dataset = client.get_dataset(dataset_ref) # API request
-
-    # View dataset properties
-    print("Dataset ID: {}".format(dataset_id))
-    print("Description: {}".format(dataset.description))
-    print("Labels:")
-    labels = dataset.labels
-    if labels:
-        for label, value in labels.items():
-            print("\t{}: {}".format(label, value))
-    else:
-        print("\tDataset has no labels defined.")
-
-    # View tables in dataset
-    print("Tables:")
-    tables = list(client.list_tables(dataset_ref)) # API request(s)
-    if tables:
-        for table in tables:
-            print("\t{}".format(table.table_id))
-    else:
-        print("\tThis dataset does not contain any tables.")
-    # [END bigquery_get_dataset]
-
-    assert dataset.description == ORIGINAL_DESCRIPTION
-    assert dataset.labels == dataset_labels
-    assert tables == []
-
-
 # [START bigquery_dataset_exists]
 def dataset_exists(client, dataset_reference):
     """Return if a dataset exists.
@@ -301,66 +211,6 @@ def test_dataset_exists(client, to_delete):
     assert not dataset_exists(client, client.dataset("i_dont_exist"))


-@pytest.mark.skip(
-    reason=(
-        "update_dataset() is flaky "
-        "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588"
-    )
-)
-def test_update_dataset_description(client, to_delete):
-    """Update a dataset's description."""
-    dataset_id = "update_dataset_description_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
-    dataset.description = "Original description."
-    client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    # [START bigquery_update_dataset_description]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
-    # dataset = client.get_dataset(dataset_ref) # API request
-
-    assert dataset.description == "Original description."
-    dataset.description = "Updated description."
-
-    dataset = client.update_dataset(dataset, ["description"]) # API request
-
-    assert dataset.description == "Updated description."
-    # [END bigquery_update_dataset_description]
-
-
-@pytest.mark.skip(
-    reason=(
-        "update_dataset() is flaky "
-        "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588"
-    )
-)
-def test_update_dataset_default_table_expiration(client, to_delete):
-    """Update a dataset's default table expiration."""
-    dataset_id = "update_dataset_default_expiration_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
-    dataset = client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    # [START bigquery_update_dataset_expiration]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
-    # dataset = client.get_dataset(dataset_ref) # API request
-
-    assert dataset.default_table_expiration_ms is None
-    one_day_ms = 24 * 60 * 60 * 1000 # in milliseconds
-    dataset.default_table_expiration_ms = one_day_ms
-
-    dataset = client.update_dataset(
-        dataset, ["default_table_expiration_ms"]
-    ) # API request
-
-    assert dataset.default_table_expiration_ms == one_day_ms
-    # [END bigquery_update_dataset_expiration]
-
-
 @pytest.mark.skip(
     reason=(
         "update_dataset() is flaky "
@@ -424,129 +274,6 @@ def test_manage_dataset_labels(client, to_delete):
     # [END bigquery_delete_label_dataset]


-@pytest.mark.skip(
-    reason=(
-        "update_dataset() is flaky "
-        "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588"
-    )
-)
-def test_update_dataset_access(client, to_delete):
-    """Update a dataset's access controls."""
-    dataset_id = "update_dataset_access_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
-    dataset = client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    # [START bigquery_update_dataset_access]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset = client.get_dataset(client.dataset('my_dataset'))
-
-    entry = bigquery.AccessEntry(
-        role="READER",
-        entity_type="userByEmail",
-        entity_id="sample.bigquery.dev@gmail.com",
-    )
-    assert entry not in dataset.access_entries
-    entries = list(dataset.access_entries)
-    entries.append(entry)
-    dataset.access_entries = entries
-
-    dataset = client.update_dataset(dataset, ["access_entries"]) # API request
-
-    assert entry in dataset.access_entries
-    # [END bigquery_update_dataset_access]
-
-
-def test_delete_dataset(client):
-    """Delete a dataset."""
-    from google.cloud.exceptions import NotFound
-
-    dataset1_id = "delete_dataset_{}".format(_millis())
-    dataset1 = bigquery.Dataset(client.dataset(dataset1_id))
-    client.create_dataset(dataset1)
-
-    dataset2_id = "delete_dataset_with_tables{}".format(_millis())
-    dataset2 = bigquery.Dataset(client.dataset(dataset2_id))
-    client.create_dataset(dataset2)
-
-    table = bigquery.Table(dataset2.table("new_table"))
-    client.create_table(table)
-
-    # [START bigquery_delete_dataset]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-
-    # Delete a dataset that does not contain any tables
-    # dataset1_id = 'my_empty_dataset'
-    dataset1_ref = client.dataset(dataset1_id)
-    client.delete_dataset(dataset1_ref) # API request
-
-    print("Dataset {} deleted.".format(dataset1_id))
-
-    # Use the delete_contents parameter to delete a dataset and its contents
-    # dataset2_id = 'my_dataset_with_tables'
-    dataset2_ref = client.dataset(dataset2_id)
-    client.delete_dataset(dataset2_ref, delete_contents=True) # API request
-
-    print("Dataset {} deleted.".format(dataset2_id))
-    # [END bigquery_delete_dataset]
-
-    for dataset in [dataset1, dataset2]:
-        with pytest.raises(NotFound):
-            client.get_dataset(dataset) # API request
-
-
-def test_list_tables(client, to_delete):
-    """List tables within a dataset."""
-    dataset_id = "list_tables_dataset_{}".format(_millis())
-    dataset_ref = client.dataset(dataset_id)
-    dataset = client.create_dataset(bigquery.Dataset(dataset_ref))
-    to_delete.append(dataset)
-
-    # [START bigquery_list_tables]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
-
-    tables = list(client.list_tables(dataset_ref)) # API request(s)
-    assert len(tables) == 0
-
-    table_ref = dataset.table("my_table")
-    table = bigquery.Table(table_ref)
-    client.create_table(table) # API request
-    tables = list(client.list_tables(dataset)) # API request(s)
-
-    assert len(tables) == 1
-    assert tables[0].table_id == "my_table"
-    # [END bigquery_list_tables]
-
-
-def test_create_table(client, to_delete):
-    """Create a table."""
-    dataset_id = "create_table_dataset_{}".format(_millis())
-    dataset_ref = client.dataset(dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    # [START bigquery_create_table]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
-
-    schema = [
-        bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
-        bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"),
-    ]
-    table_ref = dataset_ref.table("my_table")
-    table = bigquery.Table(table_ref, schema=schema)
-    table = client.create_table(table) # API request
-
-    assert table.table_id == "my_table"
-    # [END bigquery_create_table]
-
-
 def test_create_table_nested_repeated_schema(client, to_delete):
     dataset_id = "create_table_nested_repeated_{}".format(_millis())
     dataset_ref = client.dataset(dataset_id)
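Of the four snippets removed in the hunk above, the delete-dataset one is sketched below in the standalone-sample shape. Only the delete_dataset call and its delete_contents parameter come from the deleted code; the function name, parameters, and print message are assumptions:

def delete_dataset(client, dataset_id, contains_tables=False):
    """Delete a dataset, optionally with its tables (illustrative sketch only)."""
    dataset_ref = client.dataset(dataset_id)
    # delete_contents=True also removes any tables the dataset still holds.
    client.delete_dataset(dataset_ref, delete_contents=contains_tables)  # API request
    print("Dataset {} deleted.".format(dataset_id))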
@@ -729,40 +456,6 @@ def test_load_and_query_partitioned_table(client, to_delete):
     assert len(rows) == 29


-def test_get_table_information(client, to_delete):
-    """Show a table's properties."""
-    dataset_id = "show_table_dataset_{}".format(_millis())
-    table_id = "show_table_table_{}".format(_millis())
-    dataset_ref = client.dataset(dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    table = bigquery.Table(dataset.table(table_id), schema=SCHEMA)
-    table.description = ORIGINAL_DESCRIPTION
-    table = client.create_table(table)
-
-    # [START bigquery_get_table]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_id = 'my_dataset'
-    # table_id = 'my_table'
-
-    dataset_ref = client.dataset(dataset_id)
-    table_ref = dataset_ref.table(table_id)
-    table = client.get_table(table_ref) # API Request
-
-    # View table properties
-    print(table.schema)
-    print(table.description)
-    print(table.num_rows)
-    # [END bigquery_get_table]
-
-    assert table.schema == SCHEMA
-    assert table.description == ORIGINAL_DESCRIPTION
-    assert table.num_rows == 0
-
-
 # [START bigquery_table_exists]
 def table_exists(client, table_reference):
     """Return if a table exists.
@@ -2081,37 +1774,6 @@ def test_extract_table_compressed(client, to_delete):
     to_delete.insert(0, blob)


-def test_delete_table(client, to_delete):
-    """Delete a table."""
-    from google.cloud.exceptions import NotFound
-
-    dataset_id = "delete_table_dataset_{}".format(_millis())
-    table_id = "delete_table_table_{}".format(_millis())
-    dataset_ref = client.dataset(dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    dataset.location = "US"
-    dataset = client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    table_ref = dataset.table(table_id)
-    table = bigquery.Table(table_ref, schema=SCHEMA)
-    client.create_table(table)
-    # [START bigquery_delete_table]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_id = 'my_dataset'
-    # table_id = 'my_table'
-
-    table_ref = client.dataset(dataset_id).table(table_id)
-    client.delete_table(table_ref) # API request
-
-    print("Table {}:{} deleted.".format(dataset_id, table_id))
-    # [END bigquery_delete_table]
-
-    with pytest.raises(NotFound):
-        client.get_table(table) # API request
-
-
 def test_undelete_table(client, to_delete):
     dataset_id = "undelete_table_dataset_{}".format(_millis())
     table_id = "undelete_table_table_{}".format(_millis())
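Finally, a sketch of the delete-table snippet removed above in the same one-sample-per-file shape (names assumed; the delete_table call is from the deleted code):

def delete_table(client, dataset_id, table_id):
    """Delete a table (illustrative sketch only)."""
    table_ref = client.dataset(dataset_id).table(table_id)
    client.delete_table(table_ref)  # API request
    print("Table {}:{} deleted.".format(dataset_id, table_id))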
