55from gcloud import datastore
# This assumes the command is being run via tox, hence the
# repository root is the current directory.
8+ from regression import populate_datastore
89from regression import regression_utils
910
1011
class TestDatastore(unittest2.TestCase):
    """Base class for datastore regression tests.

    A single dataset connection is shared by the whole test class;
    entities that individual tests register for cleanup are removed
    again after each test.
    """

    @classmethod
    def setUpClass(cls):
        # Connect once per class; every test reuses this dataset.
        cls.dataset = regression_utils.get_dataset()

    def setUp(self):
        # Tests append the entities they create here so tearDown()
        # can remove them from the datastore afterwards.
        self.case_entities_to_delete = []

    def tearDown(self):
        # Remove everything the test created, in one transaction.
        with self.dataset.transaction():
            for stored_entity in self.case_entities_to_delete:
                stored_entity.delete()
26- def _get_dataset (self ):
27- if self ._dataset_id not in self ._datasets :
28- self ._datasets [self ._dataset_id ] = datastore .get_dataset (
29- self ._dataset_id , self ._client_email , self ._key_filename )
30- return self ._datasets [self ._dataset_id ]
27+ class TestDatastoreSave (TestDatastore ):
3128
3229 def _get_post (self , name = None , key_id = None , post_content = None ):
3330 post_content = post_content or {
@@ -40,8 +37,7 @@ def _get_post(self, name=None, key_id=None, post_content=None):
4037 'rating' : 5.0 ,
4138 }
4239 # Create an entity with the given content in our dataset.
43- dataset = self ._get_dataset ()
44- entity = dataset .entity (kind = 'Post' )
40+ entity = self .dataset .entity (kind = 'Post' )
4541 entity .update (post_content )
4642
4743 # Update the entity key.
@@ -60,16 +56,18 @@ def _generic_test_post(self, name=None, key_id=None):
6056 entity .save ()
6157
6258 # Register entity to be deleted.
63- self .entities_to_delete .append (entity )
59+ self .case_entities_to_delete .append (entity )
6460
6561 if name is not None :
6662 self .assertEqual (entity .key ().name (), name )
6763 if key_id is not None :
6864 self .assertEqual (entity .key ().id (), key_id )
69- retrieved_entity = self ._get_dataset () .get_entity (entity .key ())
65+ retrieved_entity = self .dataset .get_entity (entity .key ())
7066 # Check the keys are the same.
71- self .assertEqual (retrieved_entity .key ().path (),
72- entity .key ().path ())
67+ self .assertEqual (retrieved_entity .key ().path (), entity .key ().path ())
68+ self .assertEqual (retrieved_entity .key ().namespace (),
69+ entity .key ().namespace ())
70+
7371 # Check the data is the same.
7472 retrieved_dict = dict (retrieved_entity .items ())
7573 entity_dict = dict (entity .items ())
@@ -85,12 +83,11 @@ def test_post_with_generated_id(self):
8583 self ._generic_test_post ()
8684
8785 def test_save_multiple (self ):
88- dataset = self ._get_dataset ()
89- with dataset .transaction ():
86+ with self .dataset .transaction ():
9087 entity1 = self ._get_post ()
9188 entity1 .save ()
9289 # Register entity to be deleted.
93- self .entities_to_delete .append (entity1 )
90+ self .case_entities_to_delete .append (entity1 )
9491
9592 second_post_content = {
9693 'title' : 'How to make the perfect homemade pasta' ,
@@ -104,12 +101,188 @@ def test_save_multiple(self):
104101 entity2 = self ._get_post (post_content = second_post_content )
105102 entity2 .save ()
106103 # Register entity to be deleted.
107- self .entities_to_delete .append (entity2 )
104+ self .case_entities_to_delete .append (entity2 )
108105
109106 keys = [entity1 .key (), entity2 .key ()]
110- matches = dataset .get_entities (keys )
107+ matches = self . dataset .get_entities (keys )
111108 self .assertEqual (len (matches ), 2 )
112109
113110 def test_empty_kind (self ):
114- posts = self ._get_dataset () .query (). kind ('Post' ).limit (2 ).fetch ()
111+ posts = self .dataset .query ('Post' ).limit (2 ).fetch ()
115112 self .assertEqual (posts , [])
113+
114+
class TestDatastoreQuery(TestDatastore):
    """Query regression tests against pre-seeded 'Character' entities.

    The fixture data (and the shared ancestor key) comes from the
    ``populate_datastore`` script that seeded the dataset.
    """

    @classmethod
    def setUpClass(cls):
        super(TestDatastoreQuery, cls).setUpClass()
        cls.CHARACTERS = populate_datastore.CHARACTERS
        cls.ANCESTOR_KEY = datastore.key.Key(
            path=[populate_datastore.ANCESTOR])

    def _base_query(self):
        # Every test queries 'Character' entities under the shared ancestor.
        return self.dataset.query('Character').ancestor(self.ANCESTOR_KEY)

    def test_limit_queries(self):
        limit = 5
        query = self._base_query().limit(limit)
        # No cursor exists until fetch() has run.
        self.assertRaises(RuntimeError, query.cursor)

        # First page of characters.
        fetched = query.fetch()
        self.assertEqual(len(fetched), limit)

        # After fetch() a cursor must be available.
        cursor = query.cursor()
        self.assertTrue(cursor is not None)

        # Resuming from the cursor yields the remaining characters.
        remainder = self._base_query().with_cursor(cursor).fetch()
        self.assertEqual(len(remainder), len(self.CHARACTERS) - limit)

    def test_query_simple_filter(self):
        query = self._base_query().filter('appearances >=', 20)
        expected_matches = 6
        # Ask for one extra result so an over-count would be detected.
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

    def test_query_multiple_filters(self):
        query = self._base_query().filter(
            'appearances >=', 26).filter('family =', 'Stark')
        expected_matches = 4
        # Ask for one extra result so an over-count would be detected.
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

    def test_ancestor_query(self):
        expected_matches = 8
        # Ask for one extra result so an over-count would be detected.
        entities = self._base_query().fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

    def test_query___key___filter(self):
        rickard_key = datastore.key.Key(
            path=[populate_datastore.ANCESTOR, populate_datastore.RICKARD])

        query = self._base_query().filter('__key__ =', rickard_key)
        expected_matches = 1
        # Ask for one extra result so an over-count would be detected.
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

    def test_ordered_query(self):
        query = self._base_query().order('appearances')
        expected_matches = 8
        # Ask for one extra result so an over-count would be detected.
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

        # Spot-check the first and last of the ordered results.
        self.assertEqual(entities[0]['name'], self.CHARACTERS[0]['name'])
        self.assertEqual(entities[7]['name'], self.CHARACTERS[3]['name'])

    def test_projection_query(self):
        query = self._base_query().projection(['name', 'family'])

        # NOTE: There are 9 responses because of Catelyn. She has both
        #       Stark and Tully as her families, hence occurs twice in
        #       the results.
        expected_matches = 9
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

        arya_dict = dict(entities[0].items())
        self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'})

        catelyn_stark_entity = entities[2]
        self.assertEqual(dict(catelyn_stark_entity.items()),
                         {'name': 'Catelyn', 'family': 'Stark'})

        catelyn_tully_entity = entities[3]
        self.assertEqual(dict(catelyn_tully_entity.items()),
                         {'name': 'Catelyn', 'family': 'Tully'})

        # Both Catelyn rows must refer to the same stored entity.
        catelyn_stark_key = catelyn_stark_entity.key()
        catelyn_tully_key = catelyn_tully_entity.key()
        self.assertEqual(catelyn_stark_key.path(), catelyn_tully_key.path())
        self.assertEqual(catelyn_stark_key.namespace(),
                         catelyn_tully_key.namespace())
        # Also check the _dataset_id since both retrieved from datastore.
        self.assertEqual(catelyn_stark_key._dataset_id,
                         catelyn_tully_key._dataset_id)

        sansa_dict = dict(entities[8].items())
        self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'})

    def test_query_paginate_with_offset(self):
        offset = 2
        limit = 3
        page_query = self._base_query().offset(offset).limit(limit).order(
            'appearances')
        # No cursor is available before the first fetch().
        self.assertRaises(RuntimeError, page_query.cursor)

        # First page of characters.
        entities = page_query.fetch()
        self.assertEqual(len(entities), limit)
        self.assertEqual(entities[0]['name'], 'Robb')
        self.assertEqual(entities[1]['name'], 'Bran')
        self.assertEqual(entities[2]['name'], 'Catelyn')

        # Second page: resume from the cursor with the offset cleared.
        cursor = page_query.cursor()
        next_query = page_query.with_cursor(cursor).offset(0)
        self.assertEqual(next_query.limit(), limit)
        entities = next_query.fetch()
        self.assertEqual(len(entities), limit)
        self.assertEqual(entities[0]['name'], 'Sansa')
        self.assertEqual(entities[1]['name'], 'Jon Snow')
        self.assertEqual(entities[2]['name'], 'Arya')

    def test_query_paginate_with_start_cursor(self):
        offset = 2
        limit = 2
        page_query = self._base_query().offset(offset).limit(limit).order(
            'appearances')
        # No cursor is available before the first fetch().
        self.assertRaises(RuntimeError, page_query.cursor)

        # First page of characters.
        entities = page_query.fetch()
        self.assertEqual(len(entities), limit)

        # Build a brand-new query that starts at the saved cursor.
        cursor = page_query.cursor()
        fresh_query = self._base_query().order('appearances').with_cursor(
            cursor)

        new_entities = fresh_query.fetch()
        self.assertEqual(len(new_entities),
                         len(self.CHARACTERS) - limit - offset)
        self.assertEqual(new_entities[0]['name'], 'Catelyn')
        self.assertEqual(new_entities[3]['name'], 'Arya')

    def test_query_group_by(self):
        query = self._base_query().group_by(['alive'])

        expected_matches = 2
        # Ask for one extra result so an over-count would be detected.
        entities = query.fetch(limit=expected_matches + 1)
        self.assertEqual(len(entities), expected_matches)

        self.assertEqual(entities[0]['name'], 'Catelyn')
        self.assertEqual(entities[1]['name'], 'Arya')
0 commit comments