Skip to content

Commit 098e65e

Browse files
authored
Merge pull request googleapis#3 from GoogleCloudPlatform/master
Rebase after squash
2 parents 43464b7 + aed487d commit 098e65e

108 files changed

Lines changed: 13282 additions & 3065 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.coveragerc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
[report]
22
omit =
33
*/_generated/*.py
4+
*/_generated_v2/*.py
45
show_missing = True
56
exclude_lines =
67
# Re-enable the standard pragma

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,3 +57,4 @@ scripts/pylintrc_reduced
5757
generated_python/
5858
cloud-bigtable-client/
5959
googleapis-pb/
60+
grpc_python_venv/

.travis.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,8 @@ deploy:
2727
repo: GoogleCloudPlatform/gcloud-python
2828
# until this is fixed: https://github.com/travis-ci/travis-ci/issues/1675
2929
all_branches: true
30-
distributions: "sdist bdist_wheel"
30+
# 'bdist_wheel' builds disabled until #1879 et al. are resolved.
31+
distributions: "sdist"
3132

3233
cache:
3334
directories:

LICENSE

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
Apache License
1+
2+
Apache License
23
Version 2.0, January 2004
34
http://www.apache.org/licenses/
45

@@ -178,7 +179,7 @@ Apache License
178179
APPENDIX: How to apply the Apache License to your work.
179180

180181
To apply the Apache License to your work, attach the following
181-
boilerplate notice, with the fields enclosed by brackets "{}"
182+
boilerplate notice, with the fields enclosed by brackets "[]"
182183
replaced with your own identifying information. (Don't include
183184
the brackets!) The text should be enclosed in the appropriate
184185
comment syntax for the file format. We also recommend that a
@@ -199,4 +200,3 @@ Apache License
199200
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200201
See the License for the specific language governing permissions and
201202
limitations under the License.
202-
Lines changed: 24 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
1+
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
12
GENERATED_DIR=$(shell pwd)/generated_python
2-
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/_generated
3-
DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
4-
GRPC_PLUGIN=grpc_python_plugin
5-
PROTOC_CMD=protoc
6-
BIGTABLE_PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
3+
GENERATED_SUBDIR=_generated
4+
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/$(GENERATED_SUBDIR)
5+
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
76
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb
7+
BIGTABLE_CHECKOUT_DIR=$(shell pwd)/cloud-bigtable-client
8+
BIGTABLE_PROTOS_DIR=$(BIGTABLE_CHECKOUT_DIR)/bigtable-client-core-parent/bigtable-protos/src/main/proto
89

910
help:
1011
@echo 'Makefile for gcloud-python Bigtable protos '
@@ -14,19 +15,22 @@ help:
1415
@echo ' make clean Clean generated files '
1516

1617
generate:
18+
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
19+
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
20+
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
1721
# Retrieve git repos that have our *.proto files.
18-
[ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
19-
cd cloud-bigtable-client && git pull origin master
20-
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
21-
cd googleapis-pb && git pull origin master
22+
[ -d $(BIGTABLE_CHECKOUT_DIR) ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
23+
cd $(BIGTABLE_CHECKOUT_DIR) && git pull origin master
24+
[ -d $(GOOGLEAPIS_PROTOS_DIR) ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
25+
cd $(GOOGLEAPIS_PROTOS_DIR) && git pull origin master
2226
# Make the directory where our *_pb2.py files will go.
2327
mkdir -p $(GENERATED_DIR)
2428
# Generate all *_pb2.py files that require gRPC.
2529
$(PROTOC_CMD) \
2630
--proto_path=$(BIGTABLE_PROTOS_DIR) \
31+
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
2732
--python_out=$(GENERATED_DIR) \
28-
--plugin=protoc-gen-grpc=$(GRPC_PLUGIN) \
29-
--grpc_out=$(GENERATED_DIR) \
33+
--grpc_python_out=$(GENERATED_DIR) \
3034
$(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
3135
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
3236
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
@@ -41,49 +45,37 @@ generate:
4145
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
4246
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
4347
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto \
44-
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
45-
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
46-
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
4748
# Move the newly generated *_pb2.py files into our library.
48-
mv $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
49-
mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
50-
mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
51-
mv $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
49+
cp $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
50+
cp $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
51+
cp $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
5252
# Remove all existing *.proto files before we replace
5353
rm -f $(BIGTABLE_DIR)/*.proto
54-
rm -f $(DATASTORE_DIR)/*.proto
5554
# Copy over the *.proto files into our library.
5655
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/*.proto $(BIGTABLE_DIR)
5756
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(BIGTABLE_DIR)
5857
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(BIGTABLE_DIR)
59-
cp $(BIGTABLE_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
60-
cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
58+
cp $(GOOGLEAPIS_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
6159
# Rename all *.proto files in our library with an
6260
# underscore and remove executable bit.
6361
cd $(BIGTABLE_DIR) && \
6462
for filename in *.proto; do \
6563
chmod -x $$filename ; \
6664
mv $$filename _$$filename ; \
6765
done
68-
cd $(DATASTORE_DIR) && \
69-
for filename in *.proto; do \
70-
chmod -x $$filename ; \
71-
mv $$filename _$$filename ; \
72-
done
7366
# Separate the gRPC parts of the operations service from the
7467
# non-gRPC parts so that the protos from `googleapis-common-protos`
7568
# can be used without gRPC.
76-
python scripts/make_operations_grpc.py
77-
# Separate the gRPC parts of the datastore service from the
78-
# non-gRPC parts so that the protos can be used without gRPC.
79-
python scripts/make_datastore_grpc.py
69+
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
70+
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
71+
python scripts/make_operations_grpc.py
8072
# Rewrite the imports in the generated *_pb2.py files.
81-
python scripts/rewrite_imports.py
73+
python scripts/rewrite_imports.py $(BIGTABLE_DIR)/*pb2.py
8274

8375
check_generate:
8476
python scripts/check_generate.py
8577

8678
clean:
87-
rm -fr cloud-bigtable-client $(GENERATED_DIR)
79+
rm -fr $(GRPCIO_VIRTUALENV) $(GOOGLEAPIS_PROTOS_DIR) $(GENERATED_DIR)
8880

8981
.PHONY: generate check_generate clean

Makefile.bigtable_v2

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
2+
GENERATED_DIR=$(shell pwd)/generated_python
3+
GENERATED_SUBDIR=_generated_v2
4+
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/$(GENERATED_SUBDIR)
5+
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
6+
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb
7+
8+
help:
9+
@echo 'Makefile for gcloud-python Bigtable protos '
10+
@echo ' '
11+
@echo ' make generate Generates the protobuf modules '
12+
@echo ' make check_generate Checks that generate succeeded '
13+
@echo ' make clean Clean generated files '
14+
15+
generate:
16+
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
17+
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
18+
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
19+
# Retrieve git repos that have our *.proto files.
20+
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
21+
cd googleapis-pb && git pull origin master
22+
# Make the directory where our *_pb2.py files will go.
23+
mkdir -p $(GENERATED_DIR)
24+
# Generate all *_pb2.py files that require gRPC.
25+
$(PROTOC_CMD) \
26+
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
27+
--python_out=$(GENERATED_DIR) \
28+
--grpc_python_out=$(GENERATED_DIR) \
29+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/bigtable.proto \
30+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/bigtable_instance_admin.proto \
31+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/bigtable_table_admin.proto
32+
# Generate all *_pb2.py files that do not require gRPC.
33+
$(PROTOC_CMD) \
34+
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
35+
--python_out=$(GENERATED_DIR) \
36+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/data.proto \
37+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/common.proto \
38+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/instance.proto \
39+
$(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/table.proto \
40+
# Move the newly generated *_pb2.py files into our library.
41+
cp $(GENERATED_DIR)/google/bigtable/v2/* $(BIGTABLE_DIR)
42+
cp $(GENERATED_DIR)/google/bigtable/admin/v2/* $(BIGTABLE_DIR)
43+
# Remove all existing *.proto files before we replace
44+
rm -f $(BIGTABLE_DIR)/*.proto
45+
# Copy over the *.proto files into our library.
46+
cp $(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/v2/*.proto $(BIGTABLE_DIR)
47+
cp $(GOOGLEAPIS_PROTOS_DIR)/google/bigtable/admin/v2/*.proto $(BIGTABLE_DIR)
48+
cp $(GOOGLEAPIS_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
49+
# Rename all *.proto files in our library with an
50+
# underscore and remove executable bit.
51+
cd $(BIGTABLE_DIR) && \
52+
for filename in *.proto; do \
53+
chmod -x $$filename ; \
54+
mv $$filename _$$filename ; \
55+
done
56+
# Separate the gRPC parts of the operations service from the
57+
# non-gRPC parts so that the protos from `googleapis-common-protos`
58+
# can be used without gRPC.
59+
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
60+
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
61+
python scripts/make_operations_grpc.py
62+
# Rewrite the imports in the generated *_pb2.py files.
63+
python scripts/rewrite_imports.py $(BIGTABLE_DIR)/*pb2.py
64+
65+
check_generate:
66+
python scripts/check_generate.py
67+
68+
clean:
69+
rm -fr $(GRPCIO_VIRTUALENV) $(GOOGLEAPIS_PROTOS_DIR) $(GENERATED_DIR)
70+
71+
.PHONY: generate check_generate clean

Makefile.datastore

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
GRPCIO_VIRTUALENV=$(shell pwd)/grpc_python_venv
2+
GENERATED_DIR=$(shell pwd)/generated_python
3+
DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
4+
PROTOC_CMD=$(GRPCIO_VIRTUALENV)/bin/python -m grpc.tools.protoc
5+
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb
6+
7+
help:
8+
@echo 'Makefile for gcloud-python Bigtable protos '
9+
@echo ' '
10+
@echo ' make generate Generates the protobuf modules '
11+
@echo ' make check_generate Checks that generate succeeded '
12+
@echo ' make clean Clean generated files '
13+
14+
generate:
15+
# Ensure we have a virtualenv w/ up-to-date grpcio/grpcio-tools
16+
[ -d $(GRPCIO_VIRTUALENV) ] || python2.7 -m virtualenv $(GRPCIO_VIRTUALENV)
17+
$(GRPCIO_VIRTUALENV)/bin/pip install --upgrade grpcio grpcio-tools
18+
# Retrieve git repos that have our *.proto files.
19+
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
20+
cd googleapis-pb && git pull origin master
21+
# Make the directory where our *_pb2.py files will go.
22+
mkdir -p $(GENERATED_DIR)
23+
# Generate all *_pb2.py files that do not require gRPC.
24+
$(PROTOC_CMD) \
25+
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
26+
--python_out=$(GENERATED_DIR) \
27+
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
28+
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
29+
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
30+
# Move the newly generated *_pb2.py files into our library.
31+
cp $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
32+
# Remove all existing *.proto files before we replace
33+
rm -f $(DATASTORE_DIR)/*.proto
34+
# Copy over the *.proto files into our library.
35+
cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
36+
# Rename all *.proto files in our library with an
37+
# underscore and remove executable bit.
38+
cd $(DATASTORE_DIR) && \
39+
for filename in *.proto; do \
40+
chmod -x $$filename ; \
41+
mv $$filename _$$filename ; \
42+
done
43+
# Separate the gRPC parts of the datastore service from the
44+
# non-gRPC parts so that the protos can be used without gRPC.
45+
GRPCIO_VIRTUALENV="$(GRPCIO_VIRTUALENV)" \
46+
GENERATED_SUBDIR=$(GENERATED_SUBDIR) \
47+
python scripts/make_datastore_grpc.py
48+
# Rewrite the imports in the generated *_pb2.py files.
49+
python scripts/rewrite_imports.py $(DATASTORE_DIR)/*pb2.py
50+
51+
check_generate:
52+
python scripts/check_generate.py
53+
54+
clean:
55+
rm -fr $(GENERATED_DIR)
56+
57+
.PHONY: generate check_generate clean

README.rst

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,14 @@ This client supports the following Google Cloud Platform services:
2020
- `Google Cloud Pub/Sub`_
2121
- `Google BigQuery`_
2222
- `Google Cloud Resource Manager`_
23+
- `Google Stackdriver Logging`_
2324

2425
.. _Google Cloud Datastore: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-datastore
2526
.. _Google Cloud Storage: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-storage
2627
.. _Google Cloud Pub/Sub: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-pubsub
2728
.. _Google BigQuery: https://github.com/GoogleCloudPlatform/gcloud-python#google-bigquery
2829
.. _Google Cloud Resource Manager: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-resource-manager
30+
.. _Google Stackdriver Logging: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-logging
2931

3032
If you need support for other Google APIs, check out the
3133
`Google APIs Python Client library`_.
@@ -174,6 +176,49 @@ append-only tables, using the processing power of Google's infrastructure.
174176

175177
This package is still being implemented, but it is almost complete!
176178

179+
Load data from CSV
180+
~~~~~~~~~~~~~~~~~~
181+
182+
.. code:: python
183+
184+
import csv
185+
186+
from gcloud import bigquery
187+
from gcloud.bigquery import SchemaField
188+
189+
client = bigquery.Client()
190+
191+
dataset = client.dataset('dataset_name')
192+
dataset.create() # API request
193+
194+
SCHEMA = [
195+
SchemaField('full_name', 'STRING', mode='required'),
196+
SchemaField('age', 'INTEGER', mode='required'),
197+
]
198+
table = dataset.table('table_name', SCHEMA)
199+
table.create()
200+
201+
with open('csv_file', 'rb') as readable:
202+
table.upload_from_file(
203+
readable, source_format='CSV', skip_leading_rows=1)
204+
205+
Perform a synchronous query
206+
~~~~~~~~~~~~~~~~~~~~~~~~~~~
207+
208+
.. code:: python
209+
210+
# Perform a synchronous query.
211+
QUERY = (
212+
'SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] '
213+
'WHERE state = "TX"')
214+
query = client.run_sync_query('%s LIMIT 100' % QUERY)
215+
query.timeout_ms = TIMEOUT_MS
216+
query.run()
217+
218+
for row in query.rows:
219+
print row
220+
221+
177222
See the ``gcloud-python`` API `BigQuery documentation`_ to learn how to connect
178223
to BigQuery using this Client Library.
179224

@@ -194,6 +239,35 @@ manage projects using this Client Library.
194239

195240
.. _Resource Manager documentation: https://googlecloudplatform.github.io/gcloud-python/stable/resource-manager-api.html
196241

242+
Google Stackdriver Logging
243+
--------------------------
244+
245+
`Stackdriver Logging`_ API (`Logging API docs`_) allows you to store, search,
246+
analyze, monitor, and alert on log data and events from Google Cloud Platform.
247+
248+
.. _Stackdriver Logging: https://cloud.google.com/logging/
249+
.. _Logging API docs: https://cloud.google.com/logging/docs/
250+
251+
.. code:: python
252+
253+
from gcloud import logging
254+
client = logging.Client()
255+
logger = client.logger('log_name')
256+
logger.log_text("A simple entry") # API call
257+
258+
Example of fetching entries:
259+
260+
.. code:: python
261+
262+
entries, token = logger.list_entries()
263+
for entry in entries:
264+
print entry.payload
265+
266+
See the ``gcloud-python`` API `logging documentation`_ to learn how to connect
267+
to Stackdriver Logging using this Client Library.
268+
269+
.. _logging documentation: https://googlecloudplatform.github.io/gcloud-python/stable/logging-usage.html
270+
197271
Contributing
198272
------------
199273

0 commit comments

Comments (0)