Skip to content

Commit 1996596

Browse files
authored
chore: Suppress alpha warnings in test. Fix entity serialization in test (feast-dev#3029)
* chore: Suppress alpha warnings in test. Fix entity serialization in test (Signed-off-by: Danny Chiao <danny@tecton.ai>)
* fix typo (Signed-off-by: Danny Chiao <danny@tecton.ai>)
* revert lambda test (Signed-off-by: Danny Chiao <danny@tecton.ai>)
1 parent 4bba787 commit 1996596

21 files changed

Lines changed: 154 additions & 81 deletions

File tree

.github/workflows/pr_local_integration_tests.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ jobs:
1515
((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) ||
1616
(github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) ||
1717
github.repository != 'feast-dev/feast'
18-
runs-on: ${{ matrix.os }}p
18+
runs-on: ${{ matrix.os }}
1919
strategy:
2020
fail-fast: false
2121
matrix:

protos/feast/core/StreamFeatureView.proto

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ option java_package = "feast.proto.core";
2424

2525

2626
import "google/protobuf/duration.proto";
27-
import "google/protobuf/timestamp.proto";
2827
import "feast/core/OnDemandFeatureView.proto";
2928
import "feast/core/FeatureView.proto";
3029
import "feast/core/Feature.proto";

sdk/python/feast/driver_test_data.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data
164164
"event_timestamp": [
165165
pd.Timestamp(dt, unit="ms", tz="UTC").round("ms")
166166
for dt in pd.date_range(
167-
start=start_date, end=end_date, freq="1D", closed="left"
167+
start=start_date, end=end_date, freq="1D", inclusive="left"
168168
)
169169
]
170170
}
@@ -209,7 +209,7 @@ def create_location_stats_df(locations, start_date, end_date) -> pd.DataFrame:
209209
"event_timestamp": [
210210
pd.Timestamp(dt, unit="ms", tz="UTC").round("ms")
211211
for dt in pd.date_range(
212-
start=start_date, end=end_date, freq="1H", closed="left"
212+
start=start_date, end=end_date, freq="1H", inclusive="left"
213213
)
214214
]
215215
}
@@ -256,7 +256,7 @@ def create_global_daily_stats_df(start_date, end_date) -> pd.DataFrame:
256256
"event_timestamp": [
257257
pd.Timestamp(dt, unit="ms", tz="UTC").round("ms")
258258
for dt in pd.date_range(
259-
start=start_date, end=end_date, freq="1D", closed="left"
259+
start=start_date, end=end_date, freq="1D", inclusive="left"
260260
)
261261
]
262262
}

sdk/python/feast/feature_store.py

Lines changed: 36 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040
from google.protobuf.timestamp_pb2 import Timestamp
4141
from tqdm import tqdm
4242

43-
from feast import feature_server, ui_server, utils
43+
from feast import feature_server, flags_helper, ui_server, utils
4444
from feast.base_feature_view import BaseFeatureView
4545
from feast.batch_feature_view import BatchFeatureView
4646
from feast.data_source import DataSource, PushMode
@@ -533,7 +533,7 @@ def _validate_all_feature_views(
533533
sfvs_to_update: List[StreamFeatureView],
534534
):
535535
"""Validates all feature views."""
536-
if len(odfvs_to_update) > 0:
536+
if len(odfvs_to_update) > 0 and not flags_helper.is_test():
537537
warnings.warn(
538538
"On demand feature view is an experimental feature. "
539539
"This API is stable, but the functionality does not scale well for offline retrieval",
@@ -1123,12 +1123,13 @@ def create_saved_dataset(
11231123
Raises:
11241124
ValueError if given retrieval job doesn't have metadata
11251125
"""
1126-
warnings.warn(
1127-
"Saving dataset is an experimental feature. "
1128-
"This API is unstable and it could and most probably will be changed in the future. "
1129-
"We do not guarantee that future changes will maintain backward compatibility.",
1130-
RuntimeWarning,
1131-
)
1126+
if not flags_helper.is_test():
1127+
warnings.warn(
1128+
"Saving dataset is an experimental feature. "
1129+
"This API is unstable and it could and most probably will be changed in the future. "
1130+
"We do not guarantee that future changes will maintain backward compatibility.",
1131+
RuntimeWarning,
1132+
)
11321133

11331134
if not from_.metadata:
11341135
raise ValueError(
@@ -1175,12 +1176,13 @@ def get_saved_dataset(self, name: str) -> SavedDataset:
11751176
Raises:
11761177
SavedDatasetNotFound
11771178
"""
1178-
warnings.warn(
1179-
"Retrieving datasets is an experimental feature. "
1180-
"This API is unstable and it could and most probably will be changed in the future. "
1181-
"We do not guarantee that future changes will maintain backward compatibility.",
1182-
RuntimeWarning,
1183-
)
1179+
if not flags_helper.is_test():
1180+
warnings.warn(
1181+
"Retrieving datasets is an experimental feature. "
1182+
"This API is unstable and it could and most probably will be changed in the future. "
1183+
"We do not guarantee that future changes will maintain backward compatibility.",
1184+
RuntimeWarning,
1185+
)
11841186

11851187
dataset = self._registry.get_saved_dataset(name, self.project)
11861188
provider = self._get_provider()
@@ -1374,12 +1376,13 @@ def push(
13741376
allow_registry_cache: Whether to allow cached versions of the registry.
13751377
to: Whether to push to online or offline store. Defaults to online store only.
13761378
"""
1377-
warnings.warn(
1378-
"Push source is an experimental feature. "
1379-
"This API is unstable and it could and might change in the future. "
1380-
"We do not guarantee that future changes will maintain backward compatibility.",
1381-
RuntimeWarning,
1382-
)
1379+
if not flags_helper.is_test():
1380+
warnings.warn(
1381+
"Push source is an experimental feature. "
1382+
"This API is unstable and it could and might change in the future. "
1383+
"We do not guarantee that future changes will maintain backward compatibility.",
1384+
RuntimeWarning,
1385+
)
13831386
from feast.data_source import PushSource
13841387

13851388
all_fvs = self.list_feature_views(allow_cache=allow_registry_cache)
@@ -2268,11 +2271,12 @@ def serve_ui(
22682271
self, host: str, port: int, get_registry_dump: Callable, registry_ttl_sec: int
22692272
) -> None:
22702273
"""Start the UI server locally"""
2271-
warnings.warn(
2272-
"The Feast UI is an experimental feature. "
2273-
"We do not guarantee that future changes will maintain backward compatibility.",
2274-
RuntimeWarning,
2275-
)
2274+
if flags_helper.is_test():
2275+
warnings.warn(
2276+
"The Feast UI is an experimental feature. "
2277+
"We do not guarantee that future changes will maintain backward compatibility.",
2278+
RuntimeWarning,
2279+
)
22762280
ui_server.start_server(
22772281
self,
22782282
host=host,
@@ -2352,12 +2356,13 @@ def validate_logged_features(
23522356
or None if successful.
23532357
23542358
"""
2355-
warnings.warn(
2356-
"Logged features validation is an experimental feature. "
2357-
"This API is unstable and it could and most probably will be changed in the future. "
2358-
"We do not guarantee that future changes will maintain backward compatibility.",
2359-
RuntimeWarning,
2360-
)
2359+
if not flags_helper.is_test():
2360+
warnings.warn(
2361+
"Logged features validation is an experimental feature. "
2362+
"This API is unstable and it could and most probably will be changed in the future. "
2363+
"We do not guarantee that future changes will maintain backward compatibility.",
2364+
RuntimeWarning,
2365+
)
23612366

23622367
if not isinstance(source, FeatureService):
23632368
raise ValueError("Only feature service is currently supported as a source")

sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from pyspark.sql import SparkSession
88

9+
from feast import flags_helper
910
from feast.data_source import DataSource
1011
from feast.errors import DataSourceNoNameException
1112
from feast.infra.offline_stores.offline_utils import get_temp_entity_table_name
@@ -62,11 +63,12 @@ def __init__(
6263
owner=owner,
6364
)
6465

65-
warnings.warn(
66-
"The spark data source API is an experimental feature in alpha development. "
67-
"This API is unstable and it could and most probably will be changed in the future.",
68-
RuntimeWarning,
69-
)
66+
if not flags_helper.is_test():
67+
warnings.warn(
68+
"The spark data source API is an experimental feature in alpha development. "
69+
"This API is unstable and it could and most probably will be changed in the future.",
70+
RuntimeWarning,
71+
)
7072

7173
self.spark_options = SparkOptions(
7274
table=table,

sdk/python/feast/infra/offline_stores/offline_store.py

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
import pandas as pd
2121
import pyarrow
2222

23+
from feast import flags_helper
2324
from feast.data_source import DataSource
2425
from feast.dqm.errors import ValidationFailed
2526
from feast.feature_logging import LoggingConfig, LoggingSource
@@ -91,12 +92,13 @@ def to_df(
9192
)
9293

9394
if validation_reference:
94-
warnings.warn(
95-
"Dataset validation is an experimental feature. "
96-
"This API is unstable and it could and most probably will be changed in the future. "
97-
"We do not guarantee that future changes will maintain backward compatibility.",
98-
RuntimeWarning,
99-
)
95+
if not flags_helper.is_test():
96+
warnings.warn(
97+
"Dataset validation is an experimental feature. "
98+
"This API is unstable and it could and most probably will be changed in the future. "
99+
"We do not guarantee that future changes will maintain backward compatibility.",
100+
RuntimeWarning,
101+
)
100102

101103
validation_result = validation_reference.profile.validate(features_df)
102104
if not validation_result.is_success:
@@ -136,12 +138,13 @@ def to_arrow(
136138
)
137139

138140
if validation_reference:
139-
warnings.warn(
140-
"Dataset validation is an experimental feature. "
141-
"This API is unstable and it could and most probably will be changed in the future. "
142-
"We do not guarantee that future changes will maintain backward compatibility.",
143-
RuntimeWarning,
144-
)
141+
if not flags_helper.is_test():
142+
warnings.warn(
143+
"Dataset validation is an experimental feature. "
144+
"This API is unstable and it could and most probably will be changed in the future. "
145+
"We do not guarantee that future changes will maintain backward compatibility.",
146+
RuntimeWarning,
147+
)
145148

146149
validation_result = validation_reference.profile.validate(features_df)
147150
if not validation_result.is_success:

sdk/python/feast/stream_feature_view.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import dill
99
from typeguard import typechecked
1010

11-
from feast import utils
11+
from feast import flags_helper, utils
1212
from feast.aggregation import Aggregation
1313
from feast.data_source import DataSource
1414
from feast.entity import Entity
@@ -90,11 +90,12 @@ def __init__(
9090
timestamp_field: Optional[str] = "",
9191
udf: Optional[FunctionType] = None,
9292
):
93-
warnings.warn(
94-
"Stream Feature Views are experimental features in alpha development. "
95-
"Some functionality may still be unstable so functionality can change in the future.",
96-
RuntimeWarning,
97-
)
93+
if not flags_helper.is_test():
94+
warnings.warn(
95+
"Stream Feature Views are experimental features in alpha development. "
96+
"Some functionality may still be unstable so functionality can change in the future.",
97+
RuntimeWarning,
98+
)
9899

99100
if (
100101
type(source).__name__ not in SUPPORTED_STREAM_SOURCES

sdk/python/tests/integration/e2e/test_usage_e2e.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,7 @@ def test_usage_on(dummy_exporter, enabling_toggle):
5757
online_store=SqliteOnlineStoreConfig(
5858
path=os.path.join(temp_dir, "online.db")
5959
),
60+
entity_key_serialization_version=2,
6061
)
6162
)
6263
entity = Entity(
@@ -95,6 +96,7 @@ def test_usage_off(dummy_exporter, enabling_toggle):
9596
online_store=SqliteOnlineStoreConfig(
9697
path=os.path.join(temp_dir, "online.db")
9798
),
99+
entity_key_serialization_version=2,
98100
)
99101
)
100102
entity = Entity(

sdk/python/tests/integration/e2e/test_validation.py

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -150,25 +150,32 @@ def test_logged_features_validation(environment, universal_data_sources):
150150

151151
# add some non-existing entities to check NotFound feature handling
152152
for i in range(5):
153-
entity_df = entity_df.append(
154-
{
155-
"customer_id": 2000 + i,
156-
"driver_id": 6000 + i,
157-
"event_timestamp": datetime.datetime.now(),
158-
},
159-
ignore_index=True,
153+
entity_df = pd.concat(
154+
[
155+
entity_df,
156+
pd.DataFrame.from_records(
157+
[
158+
{
159+
"customer_id": 2000 + i,
160+
"driver_id": 6000 + i,
161+
"event_timestamp": datetime.datetime.now(),
162+
}
163+
]
164+
),
165+
]
160166
)
161167

168+
store_fs = store.get_feature_service(feature_service.name)
162169
reference_dataset = store.create_saved_dataset(
163170
from_=store.get_historical_features(
164-
entity_df=entity_df, features=feature_service, full_feature_names=True
171+
entity_df=entity_df, features=store_fs, full_feature_names=True
165172
),
166173
name="reference_for_validating_logged_features",
167174
storage=environment.data_source_creator.create_saved_dataset_destination(),
168175
)
169176

170177
log_source_df = store.get_historical_features(
171-
entity_df=entity_df, features=feature_service, full_feature_names=False
178+
entity_df=entity_df, features=store_fs, full_feature_names=False
172179
).to_df()
173180
logs_df = prepare_logs(log_source_df, feature_service, store)
174181

@@ -229,7 +236,9 @@ def test_e2e_validation_via_cli(environment, universal_data_sources):
229236
columns=["order_id", "origin_id", "destination_id", "driver_id"]
230237
)
231238
retrieval_job = store.get_historical_features(
232-
entity_df=entity_df, features=feature_service, full_feature_names=True
239+
entity_df=entity_df,
240+
features=store.get_feature_service(feature_service.name),
241+
full_feature_names=True,
233242
)
234243
logs_df = prepare_logs(retrieval_job.to_df(), feature_service, store)
235244
saved_dataset = store.create_saved_dataset(

sdk/python/tests/integration/feature_repos/repo_configuration.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -368,6 +368,7 @@ def construct_test_environment(
368368
fixture_request: Optional[pytest.FixtureRequest],
369369
test_suite_name: str = "integration_test",
370370
worker_id: str = "worker_id",
371+
entity_key_serialization_version: int = 2,
371372
) -> Environment:
372373
_uuid = str(uuid.uuid4()).replace("-", "")[:6]
373374

@@ -437,6 +438,7 @@ def construct_test_environment(
437438
repo_path=repo_dir_name,
438439
feature_server=feature_server,
439440
go_feature_serving=test_repo_config.go_feature_serving,
441+
entity_key_serialization_version=entity_key_serialization_version,
440442
)
441443

442444
# Create feature_store.yaml out of the config

0 commit comments

Comments (0)