# Dataset name shared by every test in this module; suffixed with a unique
# resource id so concurrent CI runs do not collide on the same dataset.
DATASET_NAME = 'system_tests' + unique_resource_id()
33+ def _rate_limit_exceeded (forbidden ):
34+ """Predicate: pass only exceptions with 'rateLimitExceeded' as reason."""
35+ return any (error ['reason' ] == 'rateLimitExceeded'
36+ for error in forbidden ._errors )
37+
# We need to wait to stay within the rate limits.
# The alternative outcome is a 403 Forbidden response from upstream, which
# they return instead of the more appropriate 429.
# See: https://cloud.google.com/bigquery/quota-policy
# Only rate-limit 403s are retried (via the predicate); any other
# Forbidden (e.g. a genuine permissions failure) propagates immediately.
retry_403 = RetryErrors(Forbidden, error_predicate=_rate_limit_exceeded)
44+
3345class Config (object ):
3446 """Run-time configuration to be modified at set-up.
3547
@@ -56,17 +68,21 @@ def tearDown(self):
    def test_create_dataset(self):
        """Create a dataset end-to-end and verify it exists server-side."""
        dataset = Config.CLIENT.dataset(DATASET_NAME)
        # Must not already exist, or the create below would be meaningless.
        self.assertFalse(dataset.exists())

        # Wrapped in retry_403: dataset creation counts against BigQuery
        # rate limits and may transiently fail with 403 rateLimitExceeded.
        retry_403(dataset.create)()
        # Register for teardown immediately after creation so the dataset
        # is cleaned up even if a later assertion fails.
        self.to_delete.append(dataset)

        self.assertTrue(dataset.exists())
        self.assertEqual(dataset.name, DATASET_NAME)
6377
6478 def test_reload_dataset (self ):
6579 dataset = Config .CLIENT .dataset (DATASET_NAME )
6680 dataset .friendly_name = 'Friendly'
6781 dataset .description = 'Description'
68- dataset .create ()
82+
83+ retry_403 (dataset .create )()
6984 self .to_delete .append (dataset )
85+
7086 other = Config .CLIENT .dataset (DATASET_NAME )
7187 other .reload ()
7288 self .assertEqual (other .friendly_name , 'Friendly' )
@@ -75,8 +91,10 @@ def test_reload_dataset(self):
7591 def test_patch_dataset (self ):
7692 dataset = Config .CLIENT .dataset (DATASET_NAME )
7793 self .assertFalse (dataset .exists ())
78- dataset .create ()
94+
95+ retry_403 (dataset .create )()
7996 self .to_delete .append (dataset )
97+
8098 self .assertTrue (dataset .exists ())
8199 self .assertEqual (dataset .friendly_name , None )
82100 self .assertEqual (dataset .description , None )
@@ -88,22 +106,15 @@ def test_update_dataset(self):
88106 dataset = Config .CLIENT .dataset (DATASET_NAME )
89107 self .assertFalse (dataset .exists ())
90108
91- # We need to wait to stay within the rate limits.
92- # The alternative outcome is a 403 Forbidden response from upstream.
93- # See: https://cloud.google.com/bigquery/quota-policy
94- retry = RetryErrors (Forbidden , max_tries = 2 , delay = 30 )
95- retry (dataset .create )()
96-
109+ retry_403 (dataset .create )()
97110 self .to_delete .append (dataset )
111+
98112 self .assertTrue (dataset .exists ())
99113 after = [grant for grant in dataset .access_grants
100114 if grant .entity_id != 'projectWriters' ]
101115 dataset .access_grants = after
102116
103- # We need to wait to stay within the rate limits.
104- # The alternative outcome is a 403 Forbidden response from upstream.
105- # See: https://cloud.google.com/bigquery/quota-policy
106- retry (dataset .update )()
117+ retry_403 (dataset .update )()
107118
108119 self .assertEqual (len (dataset .access_grants ), len (after ))
109120 for found , expected in zip (dataset .access_grants , after ):
@@ -119,7 +130,7 @@ def test_list_datasets(self):
119130 ]
120131 for dataset_name in datasets_to_create :
121132 dataset = Config .CLIENT .dataset (dataset_name )
122- dataset .create ()
133+ retry_403 ( dataset .create ) ()
123134 self .to_delete .append (dataset )
124135
125136 # Retrieve the datasets.
@@ -133,8 +144,10 @@ def test_list_datasets(self):
133144 def test_create_table (self ):
134145 dataset = Config .CLIENT .dataset (DATASET_NAME )
135146 self .assertFalse (dataset .exists ())
136- dataset .create ()
147+
148+ retry_403 (dataset .create )()
137149 self .to_delete .append (dataset )
150+
138151 TABLE_NAME = 'test_table'
139152 full_name = bigquery .SchemaField ('full_name' , 'STRING' ,
140153 mode = 'REQUIRED' )
@@ -149,7 +162,8 @@ def test_create_table(self):
149162 def test_list_tables (self ):
150163 dataset = Config .CLIENT .dataset (DATASET_NAME )
151164 self .assertFalse (dataset .exists ())
152- dataset .create ()
165+
166+ retry_403 (dataset .create )()
153167 self .to_delete .append (dataset )
154168
155169 # Retrieve tables before any are created for the dataset.
@@ -182,8 +196,10 @@ def test_list_tables(self):
182196 def test_patch_table (self ):
183197 dataset = Config .CLIENT .dataset (DATASET_NAME )
184198 self .assertFalse (dataset .exists ())
185- dataset .create ()
199+
200+ retry_403 (dataset .create )()
186201 self .to_delete .append (dataset )
202+
187203 TABLE_NAME = 'test_table'
188204 full_name = bigquery .SchemaField ('full_name' , 'STRING' ,
189205 mode = 'REQUIRED' )
@@ -203,13 +219,9 @@ def test_update_table(self):
203219 dataset = Config .CLIENT .dataset (DATASET_NAME )
204220 self .assertFalse (dataset .exists ())
205221
206- # We need to wait to stay within the rate limits.
207- # The alternative outcome is a 403 Forbidden response from upstream.
208- # See: https://cloud.google.com/bigquery/quota-policy
209- retry = RetryErrors (Forbidden , max_tries = 2 , delay = 30 )
210- retry (dataset .create )()
211-
222+ retry_403 (dataset .create )()
212223 self .to_delete .append (dataset )
224+
213225 TABLE_NAME = 'test_table'
214226 full_name = bigquery .SchemaField ('full_name' , 'STRING' ,
215227 mode = 'REQUIRED' )
@@ -246,8 +258,10 @@ def test_load_table_then_dump_table(self):
246258 ROW_IDS = range (len (ROWS ))
247259 dataset = Config .CLIENT .dataset (DATASET_NAME )
248260 self .assertFalse (dataset .exists ())
249- dataset .create ()
261+
262+ retry_403 (dataset .create )()
250263 self .to_delete .append (dataset )
264+
251265 TABLE_NAME = 'test_table'
252266 full_name = bigquery .SchemaField ('full_name' , 'STRING' ,
253267 mode = 'REQUIRED' )
@@ -312,7 +326,8 @@ def test_load_table_from_storage_then_dump_table(self):
312326 self .to_delete .insert (0 , blob )
313327
314328 dataset = Config .CLIENT .dataset (DATASET_NAME )
315- dataset .create ()
329+
330+ retry_403 (dataset .create )()
316331 self .to_delete .append (dataset )
317332
318333 full_name = bigquery .SchemaField ('full_name' , 'STRING' ,
0 commit comments