Query objects rather than via protobufs.
"""
2020
21- import collections
22-
2321from gcloud .datastore import _implicit_environ
22+ from gcloud .datastore .batch import _BATCHES
23+ from gcloud .datastore .batch import Batch
2424from gcloud .datastore import helpers
2525
2626
@@ -60,12 +60,33 @@ def _require_connection(connection=None):
6060 return connection
6161
6262
63- def get (key_or_keys , missing = None , deferred = None , connection = None ):
63+ def _get_dataset_id_from_keys (keys ):
64+ """Determines dataset ID from a list of keys.
65+
66+ :type keys: list of :class:`gcloud.datastore.key.Key`
67+ :param keys: The keys from the same dataset.
68+
69+ :rtype: string
70+ :returns: The dataset ID of the keys.
71+ :raises: :class:`ValueError` if the key dataset IDs don't agree.
72+ """
73+ dataset_id = keys [0 ].dataset_id
74+ # Rather than creating a list or set of all dataset IDs, we iterate
75+ # and check. We could allow the backend to check this for us if IDs
76+ # with no prefix worked (GoogleCloudPlatform/google-cloud-datastore#59)
77+ # or if we made sure that a prefix s~ or e~ was on each key.
78+ for key in keys [1 :]:
79+ if key .dataset_id != dataset_id :
80+ raise ValueError ('All keys in get must be from the same dataset.' )
81+
82+ return dataset_id
83+
84+
85+ def get (keys , missing = None , deferred = None , connection = None ):
6486 """Retrieves entities, along with their attributes.
6587
66- :type key_or_keys: list of :class:`gcloud.datastore.key.Key` or single
67- :class:`gcloud.datastore.key.Key`
68- :param key_or_keys: The key or keys to be retrieved from the datastore.
88+ :type keys: list of :class:`gcloud.datastore.key.Key`
89+ :param keys: The keys to be retrieved from the datastore.
6990
7091 :type missing: an empty list or None.
7192 :param missing: If a list is passed, the key-only entities returned
@@ -80,27 +101,14 @@ def get(key_or_keys, missing=None, deferred=None, connection=None):
80101 :type connection: :class:`gcloud.datastore.connection.Connection`
81102 :param connection: Optional. The connection used to connect to datastore.
82103
83- :rtype: list of :class:`gcloud.datastore.entity.Entity`, single
84- :class:`gcloud.datastore.entity.Entity`, or ``NoneType``
85- :returns: The requested entities, or single entity.
104+ :rtype: list of :class:`gcloud.datastore.entity.Entity`
105+ :returns: The requested entities.
86106 """
87- if isinstance (key_or_keys , collections .Iterable ):
88- keys = key_or_keys
89- else :
90- keys = [key_or_keys ]
91-
92107 if not keys :
93108 return []
94109
95110 connection = _require_connection (connection )
96- dataset_id = keys [0 ].dataset_id
97- # Rather than creating a list or set of all dataset IDs, we iterate
98- # and check. We could allow the backend to check this for us if IDs
99- # with no prefix worked (GoogleCloudPlatform/google-cloud-datastore#59)
100- # or if we made sure that a prefix s~ or e~ was on each key.
101- for key in keys [1 :]:
102- if key .dataset_id != dataset_id :
103- raise ValueError ('All keys in get must be from the same dataset.' )
111+ dataset_id = _get_dataset_id_from_keys (keys )
104112
105113 entity_pbs = connection .lookup (
106114 dataset_id = dataset_id ,
@@ -122,11 +130,33 @@ def get(key_or_keys, missing=None, deferred=None, connection=None):
122130 for entity_pb in entity_pbs :
123131 entities .append (helpers .entity_from_protobuf (entity_pb ))
124132
125- if keys is key_or_keys :
126- return entities
127- else :
128- if entities :
129- return entities [0 ]
133+ return entities
134+
135+
def delete(keys, connection=None):
    """Delete the keys in the Cloud Datastore.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys to be deleted from the datastore.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional connection used to connect to datastore.

    :raises: :class:`ValueError` if the keys do not all belong to the
             same dataset.
    """
    if not keys:
        return

    # Resolve the connection the same way ``get`` does: fall back to the
    # implicit environment and fail loudly up front when no connection is
    # configured, instead of deferring an obscure failure to commit time.
    connection = _require_connection(connection)

    # We allow partial keys to attempt a delete, the backend will fail.
    current = _BATCHES.top
    in_batch = current is not None
    if not in_batch:
        dataset_id = _get_dataset_id_from_keys(keys)
        current = Batch(dataset_id=dataset_id, connection=connection)
    for key in keys:
        current.delete(key)
    # Only commit a batch we created ourselves; an already-active batch
    # is committed by whoever started it.
    if not in_batch:
        current.commit()
130160
131161
132162def allocate_ids (incomplete_key , num_ids , connection = None ):
0 commit comments