File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/job/query.py", line 1687, in to_dataframe
query_result = wait_for_query(self, progress_bar_type, max_results=max_results)
File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/_tqdm_helpers.py", line 104, in wait_for_query
return query_job.result(max_results=max_results)
File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/job/query.py", line 1499, in result
do_get_result()
File "/usr/local/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py", line 293, in retry_wrapped_func
return retry_target(
File "/usr/local/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py", line 153, in retry_target
_retry_error_helper(
File "/usr/local/lib/python3.10/site-packages/google/api_core/retry/retry_base.py", line 212, in _retry_error_helper
raise final_exc from source_exc
File "/usr/local/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py", line 144, in retry_target
result = target()
File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/job/query.py", line 1489, in do_get_result
super(QueryJob, self).result(retry=retry, timeout=timeout)
File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/job/base.py", line 728, in result
return super(_AsyncJob, self).result(timeout=timeout, **kwargs)
File "/usr/local/lib/python3.10/site-packages/google/api_core/future/polling.py", line 256, in result
self._blocking_poll(timeout=timeout, retry=retry, polling=polling)
File "/usr/local/lib/python3.10/site-packages/google/cloud/bigquery/job/query.py", line 1245, in _blocking_poll
super(QueryJob, self)._blocking_poll(timeout=timeout, **kwargs)
File "/usr/local/lib/python3.10/site-packages/google/api_core/future/polling.py", line 139, in _blocking_poll
raise concurrent.futures.TimeoutError(
concurrent.futures._base.TimeoutError: Operation did not complete within the designated timeout of 900 seconds.
Hello,
We are using python-api-core via python-bigquery, and since we upgraded to version 2.16+ of python-api-core, we experience a timeout on the
`to_dataframe()` method (in python-bigquery) for very large queries. After investigation, this appears to be a regression in python-api-core caused by the introduction of a 900-second (15-minute) timeout (in `polling.py`).

Environment details
google-cloud-bigquery version: 3.6.0

Steps to reproduce
Code example
Stack trace