Skip to content

Commit 2dcad57

Browse files
authored
Merge pull request googleapis#1832 from tseaver/bigquery-testable_dataset_snippets
Make dataset snippets testable
2 parents aa56c9f + 9d92808 commit 2dcad57

3 files changed

Lines changed: 222 additions & 64 deletions

File tree

docs/bigquery-usage.rst

Lines changed: 23 additions & 63 deletions
Original file line numberDiff line numberDiff line change
@@ -16,33 +16,15 @@ Authentication / Configuration
1616
and
1717
:meth:`from_service_account_p12 <gcloud.bigquery.client.Client.from_service_account_p12>`.
1818

19-
- After setting ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GCLOUD_PROJECT``
20-
environment variables, create an instance of
19+
- After setting :envvar:`GOOGLE_APPLICATION_CREDENTIALS` and
20+
:envvar:`GCLOUD_PROJECT` environment variables, create an instance of
2121
:class:`Client <gcloud.bigquery.client.Client>`.
2222

2323
.. doctest::
2424

2525
>>> from gcloud import bigquery
2626
>>> client = bigquery.Client()
2727

28-
- Override the credentials inferred from the environment by passing explicit
29-
``credentials`` to one of the alternative ``classmethod`` factories,
30-
:meth:`gcloud.bigquery.client.Client.from_service_account_json`:
31-
32-
.. doctest::
33-
34-
>>> from gcloud import bigquery
35-
>>> client = bigquery.Client.from_service_account_json('/path/to/creds.json')
36-
37-
or :meth:`gcloud.bigquery.client.Client.from_service_account_p12`:
38-
39-
.. doctest::
40-
41-
>>> from gcloud import bigquery
42-
>>> client = bigquery.Client.from_service_account_p12(
43-
... '/path/to/creds.p12', 'jrandom@example.com')
44-
45-
4628
Projects
4729
--------
4830

@@ -83,54 +65,35 @@ policies to tables as they are created:
8365
Dataset operations
8466
~~~~~~~~~~~~~~~~~~
8567

86-
Create a new dataset for the client's project:
87-
88-
.. doctest::
89-
90-
>>> from gcloud import bigquery
91-
>>> client = bigquery.Client()
92-
>>> dataset = client.dataset('dataset_name')
93-
>>> dataset.create() # API request
94-
95-
Check for the existence of a dataset:
68+
List datasets for the client's project:
9669

97-
.. doctest::
70+
.. literalinclude:: bigquery_snippets.py
71+
:start-after: [START client_list_datasets]
72+
:end-before: [END client_list_datasets]
9873

99-
>>> from gcloud import bigquery
100-
>>> client = bigquery.Client()
101-
>>> dataset = client.dataset('dataset_name')
102-
>>> dataset.exists() # API request
103-
True
74+
Create a new dataset for the client's project:
10475

105-
List datasets for the client's project:
76+
.. literalinclude:: bigquery_snippets.py
77+
:start-after: [START dataset_create]
78+
:end-before: [END dataset_create]
10679

107-
.. doctest::
80+
Check for the existence of a dataset:
10881

109-
>>> from gcloud import bigquery
110-
>>> client = bigquery.Client()
111-
>>> datasets, next_page_token = client.list_datasets() # API request
112-
>>> [dataset.name for dataset in datasets]
113-
['dataset_name']
82+
.. literalinclude:: bigquery_snippets.py
83+
:start-after: [START dataset_exists]
84+
:end-before: [END dataset_exists]
11485

11586
Refresh metadata for a dataset (to pick up changes made by another client):
11687

117-
.. doctest::
118-
119-
>>> from gcloud import bigquery
120-
>>> client = bigquery.Client()
121-
>>> dataset = client.dataset('dataset_name')
122-
>>> dataset.reload() # API request
88+
.. literalinclude:: bigquery_snippets.py
89+
:start-after: [START dataset_reload]
90+
:end-before: [END dataset_reload]
12391

12492
Patch metadata for a dataset:
12593

126-
.. doctest::
127-
128-
>>> from gcloud import bigquery
129-
>>> client = bigquery.Client()
130-
>>> dataset = client.dataset('dataset_name')
131-
>>> one_day_ms = 24 * 60 * 60 * 1000
132-
>>> dataset.patch(description='Description goes here',
133-
... default_table_expiration_ms=one_day_ms) # API request
94+
.. literalinclude:: bigquery_snippets.py
95+
:start-after: [START dataset_patch]
96+
:end-before: [END dataset_patch]
13497

13598
Replace the ACL for a dataset, and update all writeable fields:
13699

@@ -147,12 +110,9 @@ Replace the ACL for a dataset, and update all writeable fields:
147110

148111
Delete a dataset:
149112

150-
.. doctest::
151-
152-
>>> from gcloud import bigquery
153-
>>> client = bigquery.Client()
154-
>>> dataset = client.dataset('dataset_name')
155-
>>> dataset.delete() # API request
113+
.. literalinclude:: bigquery_snippets.py
114+
:start-after: [START dataset_delete]
115+
:end-before: [END dataset_delete]
156116

157117

158118
Tables

docs/bigquery_snippets.py

Lines changed: 198 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,198 @@
1+
# Copyright 2016 Google Inc. All rights reserved.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
"""Testable usage examples for Google Cloud BigQuery API wrapper
16+
17+
Each example function takes a ``client`` argument (which must be an instance
18+
of :class:`gcloud.bigquery.client.Client`) and uses it to perform a task with
19+
the API.
20+
21+
To facilitate running the examples as system tests, each example is also passed
22+
a ``to_delete`` list; the function adds to the list any objects created which
23+
need to be deleted during teardown.
24+
"""
25+
26+
import time
27+
28+
from gcloud.bigquery.client import Client
29+
30+
31+
def snippet(func):
    """Tag ``func`` as a snippet example so ``_find_examples`` picks it up."""
    setattr(func, '_snippet', True)
    return func
35+
36+
37+
def _millis():
    """Return the current wall-clock time as milliseconds since the epoch."""
    seconds = time.time()
    return seconds * 1000
39+
40+
41+
@snippet
42+
def client_list_datasets(client, to_delete):  # pylint: disable=unused-argument
    """List datasets for a project."""

    def do_something_with(item):  # pylint: disable=unused-argument
        pass

    # [START client_list_datasets]
    more_pages = True
    token = None
    while more_pages:
        datasets, token = client.list_datasets(page_token=token)  # API request
        for dataset in datasets:
            do_something_with(dataset)
        more_pages = token is not None
    # [END client_list_datasets]
57+
58+
59+
@snippet
60+
def dataset_create(client, to_delete):
    """Create a dataset, registering it for teardown."""
    dataset_name = 'dataset_create_%d' % (_millis(),)

    # [START dataset_create]
    dataset = client.dataset(dataset_name)
    dataset.create()  # API request
    # [END dataset_create]

    to_delete.append(dataset)
70+
71+
72+
@snippet
73+
def dataset_exists(client, to_delete):
    """Probe for a dataset before and after creating it."""
    dataset_name = 'dataset_exists_%d' % (_millis(),)
    dataset = client.dataset(dataset_name)
    to_delete.append(dataset)

    # [START dataset_exists]
    assert not dataset.exists()  # API request
    dataset.create()  # API request
    assert dataset.exists()  # API request
    # [END dataset_exists]
84+
85+
86+
@snippet
87+
def dataset_reload(client, to_delete):
    """Re-fetch a dataset's metadata, discarding local edits."""
    original = 'Original description'
    changed = 'Locally-changed description'
    dataset = client.dataset('dataset_reload_%d' % (_millis(),))
    dataset.description = original
    dataset.create()
    to_delete.append(dataset)

    # [START dataset_reload]
    assert dataset.description == original
    dataset.description = changed
    assert dataset.description == changed
    dataset.reload()  # API request
    assert dataset.description == original
    # [END dataset_reload]
104+
105+
106+
@snippet
107+
def dataset_patch(client, to_delete):
    """Patch selected fields of a dataset's metadata."""
    original = 'Original description'
    patched = 'Patched description'
    dataset = client.dataset('dataset_patch_%d' % (_millis(),))
    dataset.description = original
    dataset.create()
    to_delete.append(dataset)

    # [START dataset_patch]
    one_day_ms = 24 * 60 * 60 * 1000
    assert dataset.description == original
    dataset.patch(
        description=patched,
        default_table_expiration_ms=one_day_ms,
    )  # API request
    assert dataset.description == patched
    assert dataset.default_table_expiration_ms == one_day_ms
    # [END dataset_patch]
127+
128+
129+
@snippet
130+
def dataset_update(client, to_delete):
    """Save every writeable dataset field back to the server."""
    original = 'Original description'
    updated = 'Updated description'
    dataset = client.dataset('dataset_update_%d' % (_millis(),))
    dataset.description = original
    dataset.create()
    to_delete.append(dataset)
    dataset.reload()

    # [START dataset_update]
    from gcloud.bigquery import AccessGrant
    assert dataset.description == original
    assert dataset.default_table_expiration_ms is None
    grant = AccessGrant(
        role='READER', entity_type='domain', entity_id='example.com')
    assert grant not in dataset.access_grants
    one_day_ms = 24 * 60 * 60 * 1000
    dataset.description = updated
    dataset.default_table_expiration_ms = one_day_ms
    dataset.access_grants = list(dataset.access_grants) + [grant]
    dataset.update()  # API request
    assert dataset.description == updated
    assert dataset.default_table_expiration_ms == one_day_ms
    assert grant in dataset.access_grants
    # [END dataset_update]
159+
160+
161+
@snippet
162+
def dataset_delete(client, to_delete):  # pylint: disable=unused-argument
    """Delete a dataset outright (so nothing is left for teardown)."""
    dataset = client.dataset('dataset_delete_%d' % (_millis(),))
    dataset.create()

    # [START dataset_delete]
    assert dataset.exists()  # API request
    dataset.delete()
    assert not dataset.exists()  # API request
    # [END dataset_delete]
173+
174+
175+
def _find_examples():
    """Yield the snippet example functions defined in this module.

    Functions marked by the ``@snippet`` decorator are yielded in source
    order (by first line number) so the examples run in the order they
    appear in the file.
    """
    funcs = [obj for obj in globals().values()
             if getattr(obj, '_snippet', False)]
    # ``__code__`` works on Python 2.6+ and Python 3; the ``func_code``
    # attribute is Python-2-only and breaks under Python 3.
    for func in sorted(funcs, key=lambda f: f.__code__.co_firstlineno):
        yield func
180+
181+
182+
def main():
    """Run every registered example against a live client, then clean up.

    An :exc:`AssertionError` raised inside an example is reported as FAIL;
    any other exception is reported as ERROR.  Objects the example added to
    its ``to_delete`` list are deleted after it runs.
    """
    client = Client()
    for example in _find_examples():
        to_delete = []
        # ``__name__`` / ``__doc__`` work on both Python 2 and Python 3;
        # ``func_name`` / ``func_doc`` are Python-2-only and break under 3.
        print('%-25s: %s' % (
            example.__name__, example.__doc__))
        try:
            example(client, to_delete)
        except AssertionError as e:
            print(' FAIL: %s' % (e,))
        except Exception as e:  # pylint: disable=broad-except
            print(' ERROR: %r' % (e,))
        for item in to_delete:
            item.delete()


if __name__ == '__main__':
    main()

docs/pubsub_snippets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
of :class:`gcloud.pubsub.client.Client`) and uses it to perform a task with
1919
the API.
2020
21-
To facility running the examples as system tests, each example is also passed
21+
To facilitate running the examples as system tests, each example is also passed
2222
a ``to_delete`` list; the function adds to the list any objects created which
2323
need to be deleted during teardown.
2424
"""

0 commit comments

Comments
 (0)