@@ -238,6 +238,41 @@ def test_to_arrow():
238238 ]
239239
240240
@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`")
def test_to_arrow_max_results_no_progress_bar():
    """``max_results`` must be forwarded to ``QueryJob.result`` when
    ``to_arrow`` is called without a progress bar.
    """
    from google.cloud.bigquery import table
    from google.cloud.bigquery.job import QueryJob as target_class
    from google.cloud.bigquery.schema import SchemaField

    # Wire up a fake client and a query job built from a begun job resource.
    conn = _make_connection({})
    test_client = _make_client(connection=conn)
    job_resource = _make_job_resource(job_type="query")
    query_job = target_class.from_api_repr(job_resource, test_client)

    # Two-column schema with two matching rows of raw API data.
    fields = [
        SchemaField("name", "STRING", mode="REQUIRED"),
        SchemaField("age", "INTEGER", mode="REQUIRED"),
    ]
    raw_rows = [
        {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
        {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]},
    ]
    fake_api_request = mock.Mock(return_value={"rows": raw_rows})
    iterator = table.RowIterator(test_client, fake_api_request, "/foo", fields)

    # Patch result() so the job yields our canned iterator instead of
    # hitting the (fake) API, then request a capped number of rows.
    with mock.patch(
        "google.cloud.bigquery.job.QueryJob.result", return_value=iterator,
    ) as patched_result:
        arrow_table = query_job.to_arrow(
            create_bqstorage_client=False, max_results=123
        )

    # The cap must be passed straight through to result().
    patched_result.assert_called_once_with(max_results=123)

    assert isinstance(arrow_table, pyarrow.Table)
    assert arrow_table.num_rows == 2
275+
241276@pytest .mark .skipif (pyarrow is None , reason = "Requires `pyarrow`" )
242277@pytest .mark .skipif (tqdm is None , reason = "Requires `tqdm`" )
243278def test_to_arrow_w_tqdm_w_query_plan ():
@@ -290,7 +325,9 @@ def test_to_arrow_w_tqdm_w_query_plan():
290325 assert result_patch_tqdm .call_count == 3
291326 assert isinstance (tbl , pyarrow .Table )
292327 assert tbl .num_rows == 2
293- result_patch_tqdm .assert_called_with (timeout = _PROGRESS_BAR_UPDATE_INTERVAL )
328+ result_patch_tqdm .assert_called_with (
329+ timeout = _PROGRESS_BAR_UPDATE_INTERVAL , max_results = None
330+ )
294331
295332
296333@pytest .mark .skipif (pyarrow is None , reason = "Requires `pyarrow`" )
@@ -341,7 +378,9 @@ def test_to_arrow_w_tqdm_w_pending_status():
341378 assert result_patch_tqdm .call_count == 2
342379 assert isinstance (tbl , pyarrow .Table )
343380 assert tbl .num_rows == 2
344- result_patch_tqdm .assert_called_with (timeout = _PROGRESS_BAR_UPDATE_INTERVAL )
381+ result_patch_tqdm .assert_called_with (
382+ timeout = _PROGRESS_BAR_UPDATE_INTERVAL , max_results = None
383+ )
345384
346385
347386@pytest .mark .skipif (pyarrow is None , reason = "Requires `pyarrow`" )
@@ -716,7 +755,9 @@ def test_to_dataframe_w_tqdm_pending():
716755 assert isinstance (df , pandas .DataFrame )
717756 assert len (df ) == 4 # verify the number of rows
718757 assert list (df ) == ["name" , "age" ] # verify the column names
719- result_patch_tqdm .assert_called_with (timeout = _PROGRESS_BAR_UPDATE_INTERVAL )
758+ result_patch_tqdm .assert_called_with (
759+ timeout = _PROGRESS_BAR_UPDATE_INTERVAL , max_results = None
760+ )
720761
721762
722763@pytest .mark .skipif (pandas is None , reason = "Requires `pandas`" )
@@ -774,4 +815,56 @@ def test_to_dataframe_w_tqdm():
774815 assert isinstance (df , pandas .DataFrame )
775816 assert len (df ) == 4 # verify the number of rows
776817 assert list (df ), ["name" , "age" ] # verify the column names
777- result_patch_tqdm .assert_called_with (timeout = _PROGRESS_BAR_UPDATE_INTERVAL )
818+ result_patch_tqdm .assert_called_with (
819+ timeout = _PROGRESS_BAR_UPDATE_INTERVAL , max_results = None
820+ )
821+
822+
@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
@pytest.mark.skipif(tqdm is None, reason="Requires `tqdm`")
def test_to_dataframe_w_tqdm_max_results():
    """With a tqdm progress bar active, ``to_dataframe`` must forward
    ``max_results`` on every retry of ``QueryJob.result``.
    """
    from google.cloud.bigquery import table
    from google.cloud.bigquery.job import QueryJob as target_class
    from google.cloud.bigquery.schema import SchemaField
    from google.cloud.bigquery._tqdm_helpers import _PROGRESS_BAR_UPDATE_INTERVAL

    # Fake client + query job from a begun resource.
    conn = _make_connection({})
    test_client = _make_client(connection=conn)
    job_resource = _make_job_resource(job_type="query")
    query_job = target_class.from_api_repr(job_resource, test_client)

    # Single-row result set backed by a canned API response.
    fields = [
        SchemaField("name", "STRING", mode="NULLABLE"),
        SchemaField("age", "INTEGER", mode="NULLABLE"),
    ]
    raw_rows = [{"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}]
    fake_api_request = mock.Mock(return_value={"rows": raw_rows})
    iterator = table.RowIterator(test_client, fake_api_request, "/foo", fields)

    # A completed query plan so the progress bar has stages to report.
    query_job._properties["statistics"] = {
        "query": {
            "queryPlan": [
                {"name": "S00: Input", "id": "0", "status": "COMPLETE"},
                {"name": "S01: Output", "id": "1", "status": "COMPLETE"},
            ]
        },
    }

    # First result() call times out (forcing a progress-bar update cycle),
    # the second returns the rows; reload() is stubbed out entirely.
    patched_reload = mock.patch(
        "google.cloud.bigquery.job._AsyncJob.reload", autospec=True
    )
    patched_result = mock.patch(
        "google.cloud.bigquery.job.QueryJob.result",
        side_effect=[concurrent.futures.TimeoutError, iterator],
    )

    with patched_result as result_mock, patched_reload:
        query_job.to_dataframe(
            progress_bar_type="tqdm", create_bqstorage_client=False, max_results=3
        )

    # Two calls: the timed-out attempt plus the successful one; both must
    # carry the polling timeout and the max_results cap.
    assert result_mock.call_count == 2
    result_mock.assert_called_with(
        timeout=_PROGRESS_BAR_UPDATE_INTERVAL, max_results=3
    )
0 commit comments