Skip to content

Commit 0b23231

Browse files
authored
Add sample to schedule query with BQ DTS. (googleapis#7703)
* Add semi-generated sample for BQ DTS.
* Add authorization_code as parameter.
* Give scheduled query example a shorter filename.
* Add test for create scheduled query sample.
* Create dataset in sample tests. This ensures the user (or service account) running the tests has owner permissions, which is required to run scheduled queries.
* Add samples nox session.
1 parent cd57e65 commit 0b23231

7 files changed

Lines changed: 201 additions & 2 deletions

File tree

bigquery_datatransfer/noxfile.py

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def blacken(session):
4646
"""Run black.
4747
4848
Format code to uniform standard.
49-
49+
5050
This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
5151
That run uses an image that doesn't have 3.6 installed. Before updating this
5252
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
@@ -123,6 +123,25 @@ def system(session):
123123
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
124124

125125

126+
@nox.session(python=["2.7", "3.7"])
def samples(session):
    """Run the sample tests against a live project.

    Requires GOOGLE_APPLICATION_CREDENTIALS to point at credentials with
    permission to create datasets and scheduled queries.
    """
    # Sanity check: only run tests if the environment variable is set.
    if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
        session.skip("Credentials must be set via environment variable")

    session.install("mock", "pytest")
    for local_dep in LOCAL_DEPS:
        session.install("-e", local_dep)

    # Samples may ship their own requirements file; install it when present.
    requirements_path = os.path.join("samples", "requirements.txt")
    if os.path.exists(requirements_path):
        session.install("-r", requirements_path)
    session.install("-e", ".")

    session.run("py.test", "--quiet", "samples", *session.posargs)
126145
@nox.session(python="3.7")
127146
def cover(session):
128147
"""Run the final coverage report.

bigquery_datatransfer/samples/__init__.py

Whitespace-only changes.
Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2019 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
# To install the latest published package dependency, execute the following:
18+
# pip install google-cloud-bigquery-datatransfer
19+
20+
21+
def sample_create_transfer_config(project_id, dataset_id, authorization_code=""):
    """Create a scheduled query in BigQuery Data Transfer Service.

    Returns the resource name of the created transfer config so that tests
    can delete it afterwards.
    """
    # [START bigquerydatatransfer_create_scheduled_query]
    from google.cloud import bigquery_datatransfer_v1
    import google.protobuf.json_format

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # TODO(developer): Set the project_id to the project that contains the
    # destination dataset.
    # project_id = "your-project-id"

    # TODO(developer): Set the destination dataset. The authorized user must
    # have owner permissions on the dataset.
    # dataset_id = "your_dataset_id"

    # TODO(developer): The first time you run this sample, set the
    # authorization code to a value from the URL:
    # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=433065040935-hav5fqnc9p9cht3rqneus9115ias2kn1.apps.googleusercontent.com&scope=https://www.googleapis.com/auth/bigquery%20https://www.googleapis.com/auth/drive&redirect_uri=urn:ietf:wg:oauth:2.0:oob
    #
    # authorization_code = "_4/ABCD-EFGHIJKLMNOP-QRSTUVWXYZ"
    #
    # You can use an empty string for authorization_code in subsequent runs of
    # this code sample with the same credentials.
    #
    # authorization_code = ""

    # Use standard SQL syntax for the query.
    query_string = """
    SELECT
      CURRENT_TIMESTAMP() as current_time,
      @run_time as intended_run_time,
      @run_date as intended_run_date,
      17 as some_integer
    """

    # Describe the scheduled query as plain JSON, then convert it into the
    # protobuf message the API expects.
    config_spec = {
        "destination_dataset_id": dataset_id,
        "display_name": "Your Scheduled Query Name",
        "data_source_id": "scheduled_query",
        "params": {
            "query": query_string,
            "destination_table_name_template": "your_table_{run_date}",
            "write_disposition": "WRITE_TRUNCATE",
            "partitioning_field": "",
        },
        "schedule": "every 24 hours",
    }
    transfer_config = google.protobuf.json_format.ParseDict(
        config_spec, bigquery_datatransfer_v1.types.TransferConfig()
    )

    response = client.create_transfer_config(
        client.project_path(project_id),
        transfer_config,
        authorization_code=authorization_code,
    )

    print("Created scheduled query '{}'".format(response.name))
    # [END bigquerydatatransfer_create_scheduled_query]
    # Return the config name for testing purposes, so that it can be deleted.
    return response.name
82+
83+
84+
def main():
    """Parse command-line flags and create the scheduled query.

    Flags: --project_id, --dataset_id, --authorization_code (all optional,
    with placeholder defaults matching the TODOs in the sample).
    """
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--project_id", type=str, default="your-project-id")
    parser.add_argument("--dataset_id", type=str, default="your_dataset_id")
    parser.add_argument("--authorization_code", type=str, default="")
    args = parser.parse_args()

    # BUG FIX: the original call omitted args.dataset_id, so the parsed
    # authorization code was passed positionally as the dataset_id parameter
    # and the authorization code defaulted to "".
    sample_create_transfer_config(
        args.project_id, args.dataset_id, args.authorization_code
    )


if __name__ == "__main__":
    main()
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-e ../bigquery

bigquery_datatransfer/samples/tests/__init__.py

Whitespace-only changes.
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2019 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
import time
18+
import os
19+
20+
import google.api_core.exceptions
21+
import google.auth
22+
import google.cloud.bigquery
23+
import pytest
24+
25+
from .. import create_scheduled_query
26+
27+
28+
@pytest.fixture
def project_id():
    """Project in which to create the scheduled query.

    Read from the PROJECT_ID environment variable; raises KeyError when the
    variable is not set so misconfiguration fails loudly.
    """
    env_project = os.environ["PROJECT_ID"]
    return env_project
31+
32+
33+
@pytest.fixture(scope="module")
def credentials():
    """Application-default credentials with the cloud-platform scope.

    If using a service account, the BQ DTS robot associated with your project
    requires the roles/iam.serviceAccountShortTermTokenMinter permission to
    act on behalf of the account.
    """
    scopes = ["https://www.googleapis.com/auth/cloud-platform"]
    default_credentials, _ = google.auth.default(scopes)
    return default_credentials
40+
41+
42+
@pytest.fixture(scope="module")
def bqdts_client(credentials):
    """Module-scoped BigQuery Data Transfer Service client."""
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        credentials=credentials
    )
    return client
47+
48+
49+
@pytest.fixture(scope="module")
def bigquery_client(credentials):
    """Module-scoped BigQuery client used for dataset setup and teardown."""
    client = google.cloud.bigquery.Client(credentials=credentials)
    return client
52+
53+
54+
@pytest.fixture(scope="module")
def dataset_id(bigquery_client):
    """Create a throwaway dataset for the test, then delete it.

    Creating the dataset from scratch ensures the account running the tests
    has owner permissions on it, which scheduled queries require.
    """
    # BUG FIX: time.clock() was deprecated since Python 3.3 and removed in
    # 3.8; on Linux it also returns CPU time (tiny, slowly-advancing values),
    # so back-to-back runs could generate colliding dataset IDs. Wall-clock
    # time.time() gives a unique microsecond-resolution suffix.
    temp_ds_id = "bqdts_{}".format(int(time.time() * 1000000))
    bigquery_client.create_dataset(temp_ds_id)
    yield temp_ds_id
    # delete_contents=True so teardown still succeeds if the scheduled query
    # already wrote its destination table into the dataset.
    bigquery_client.delete_dataset(temp_ds_id, delete_contents=True)
62+
63+
64+
@pytest.fixture
def to_delete(bqdts_client):
    """Collect transfer-config resource names and delete them after the test."""
    resource_names = []
    yield resource_names

    for name in resource_names:
        try:
            bqdts_client.delete_transfer_config(name)
        except google.api_core.exceptions.NotFound:
            # Already gone — nothing left to clean up.
            pass
74+
75+
76+
def test_sample(project_id, dataset_id, capsys, to_delete):
    """The sample creates a transfer config and prints its resource name."""
    config_name = create_scheduled_query.sample_create_transfer_config(
        project_id, dataset_id
    )
    # Register for cleanup before asserting, so a failure still deletes it.
    to_delete.append(config_name)

    captured = capsys.readouterr()
    assert config_name in captured.out

bigquery_datatransfer/synth.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@
6262
# ----------------------------------------------------------------------------
6363
# Add templated files
6464
# ----------------------------------------------------------------------------
65-
templated_files = common.py_library(unit_cov_level=79, cov_level=79)
65+
templated_files = common.py_library(unit_cov_level=79, cov_level=79, samples_test=True)
6666
s.move(templated_files)
6767

6868
s.shell.run(["nox", "-s", "blacken"], hide_output=False)

0 commit comments

Comments
 (0)