Skip to content
This repository was archived by the owner on Mar 6, 2026. It is now read-only.

Commit e94597d

Browse files
committed
tests: avoid INTERVAL columns in pandas tests
1 parent 7e6ee6d commit e94597d

File tree

1 file changed

+37
-2
lines changed

1 file changed

+37
-2
lines changed

tests/system/test_pandas.py

Lines changed: 37 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import pytest
2727

2828
from google.cloud import bigquery
29+
from google.cloud.bigquery import enums
2930
from google.cloud import bigquery_storage
3031

3132
from . import helpers
@@ -802,8 +803,25 @@ def test_list_rows_max_results_w_bqstorage(bigquery_client):
802803
("max_results",), ((None,), (10,),) # Use BQ Storage API. # Use REST API.
803804
)
804805
def test_list_rows_nullable_scalars_dtypes(bigquery_client, scalars_table, max_results):
806+
# TODO(GH#836): Avoid INTERVAL columns until they are supported by the
807+
# BigQuery Storage API and pyarrow.
808+
schema = [
809+
bigquery.SchemaField("bool_col", enums.SqlTypeNames.BOOLEAN),
810+
bigquery.SchemaField("bignumeric_col", enums.SqlTypeNames.BIGNUMERIC),
811+
bigquery.SchemaField("bytes_col", enums.SqlTypeNames.BYTES),
812+
bigquery.SchemaField("date_col", enums.SqlTypeNames.DATE),
813+
bigquery.SchemaField("datetime_col", enums.SqlTypeNames.DATETIME),
814+
bigquery.SchemaField("float64_col", enums.SqlTypeNames.FLOAT64),
815+
bigquery.SchemaField("geography_col", enums.SqlTypeNames.GEOGRAPHY),
816+
bigquery.SchemaField("int64_col", enums.SqlTypeNames.INT64),
817+
bigquery.SchemaField("numeric_col", enums.SqlTypeNames.NUMERIC),
818+
bigquery.SchemaField("string_col", enums.SqlTypeNames.STRING),
819+
bigquery.SchemaField("time_col", enums.SqlTypeNames.TIME),
820+
bigquery.SchemaField("timestamp_col", enums.SqlTypeNames.TIMESTAMP),
821+
]
822+
805823
df = bigquery_client.list_rows(
806-
scalars_table, max_results=max_results,
824+
scalars_table, max_results=max_results, selected_fields=schema,
807825
).to_dataframe()
808826

809827
assert df.dtypes["bool_col"].name == "boolean"
@@ -836,8 +854,25 @@ def test_list_rows_nullable_scalars_dtypes(bigquery_client, scalars_table, max_r
836854
def test_list_rows_nullable_scalars_extreme_dtypes(
837855
bigquery_client, scalars_extreme_table, max_results
838856
):
857+
# TODO(GH#836): Avoid INTERVAL columns until they are supported by the
858+
# BigQuery Storage API and pyarrow.
859+
schema = [
860+
bigquery.SchemaField("bool_col", enums.SqlTypeNames.BOOLEAN),
861+
bigquery.SchemaField("bignumeric_col", enums.SqlTypeNames.BIGNUMERIC),
862+
bigquery.SchemaField("bytes_col", enums.SqlTypeNames.BYTES),
863+
bigquery.SchemaField("date_col", enums.SqlTypeNames.DATE),
864+
bigquery.SchemaField("datetime_col", enums.SqlTypeNames.DATETIME),
865+
bigquery.SchemaField("float64_col", enums.SqlTypeNames.FLOAT64),
866+
bigquery.SchemaField("geography_col", enums.SqlTypeNames.GEOGRAPHY),
867+
bigquery.SchemaField("int64_col", enums.SqlTypeNames.INT64),
868+
bigquery.SchemaField("numeric_col", enums.SqlTypeNames.NUMERIC),
869+
bigquery.SchemaField("string_col", enums.SqlTypeNames.STRING),
870+
bigquery.SchemaField("time_col", enums.SqlTypeNames.TIME),
871+
bigquery.SchemaField("timestamp_col", enums.SqlTypeNames.TIMESTAMP),
872+
]
873+
839874
df = bigquery_client.list_rows(
840-
scalars_extreme_table, max_results=max_results
875+
scalars_extreme_table, max_results=max_results, selected_fields=schema,
841876
).to_dataframe()
842877

843878
# Extreme values are out-of-bounds for pandas datetime64 values, which use

0 commit comments

Comments (0)