Skip to content

Commit ad5694e

Browse files
Authored by: mirayyuce (Miray Yuce) and achals (Achal Shah)
fix: Add new value types to types.ts for web ui (feast-dev#2463)
* add new value-types

  Signed-off-by: Miray Yuce <myuce@twitter.com>

* auto formatted files

  Signed-off-by: Miray Yuce <myuce@twitter.com>

* make format-python

  Signed-off-by: Achal Shah <achals@gmail.com>

Co-authored-by: Miray Yuce <myuce@twitter.com>
Co-authored-by: Achal Shah <achals@gmail.com>
1 parent be08f10 commit ad5694e

20 files changed

Lines changed: 121 additions & 111 deletions

File tree

sdk/python/feast/feature_store.py

Lines changed: 45 additions & 45 deletions
Original file line number | Diff line number | Diff line change
@@ -399,14 +399,14 @@ def delete_feature_view(self, name: str):
399399
@log_exceptions_and_usage
400400
def delete_feature_service(self, name: str):
401401
"""
402-
Deletes a feature service.
402+
Deletes a feature service.
403403
404-
Args:
405-
name: Name of feature service.
404+
Args:
405+
name: Name of feature service.
406406
407-
Raises:
408-
FeatureServiceNotFoundException: The feature view could not be found.
409-
"""
407+
Raises:
408+
FeatureServiceNotFoundException: The feature view could not be found.
409+
"""
410410
return self._registry.delete_feature_service(name, self.project)
411411

412412
def _get_features(
@@ -903,17 +903,17 @@ def create_saved_dataset(
903903
feature_service: Optional[FeatureService] = None,
904904
) -> SavedDataset:
905905
"""
906-
Execute provided retrieval job and persist its outcome in given storage.
907-
Storage type (eg, BigQuery or Redshift) must be the same as globally configured offline store.
908-
After data successfully persisted saved dataset object with dataset metadata is committed to the registry.
909-
Name for the saved dataset should be unique within project, since it's possible to overwrite previously stored dataset
910-
with the same name.
906+
Execute provided retrieval job and persist its outcome in given storage.
907+
Storage type (eg, BigQuery or Redshift) must be the same as globally configured offline store.
908+
After data successfully persisted saved dataset object with dataset metadata is committed to the registry.
909+
Name for the saved dataset should be unique within project, since it's possible to overwrite previously stored dataset
910+
with the same name.
911911
912-
Returns:
913-
SavedDataset object with attached RetrievalJob
912+
Returns:
913+
SavedDataset object with attached RetrievalJob
914914
915-
Raises:
916-
ValueError if given retrieval job doesn't have metadata
915+
Raises:
916+
ValueError if given retrieval job doesn't have metadata
917917
"""
918918
warnings.warn(
919919
"Saving dataset is an experimental feature. "
@@ -1589,11 +1589,11 @@ def _get_unique_entities(
15891589
join_key_values: Dict[str, List[Value]],
15901590
entity_name_to_join_key_map: Dict[str, str],
15911591
) -> Tuple[Tuple[Dict[str, Value], ...], Tuple[List[int], ...]]:
1592-
""" Return the set of unique composite Entities for a Feature View and the indexes at which they appear.
1592+
"""Return the set of unique composite Entities for a Feature View and the indexes at which they appear.
15931593
1594-
This method allows us to query the OnlineStore for data we need only once
1595-
rather than requesting and processing data for the same combination of
1596-
Entities multiple times.
1594+
This method allows us to query the OnlineStore for data we need only once
1595+
rather than requesting and processing data for the same combination of
1596+
Entities multiple times.
15971597
"""
15981598
# Get the correct set of entity values with the correct join keys.
15991599
table_entity_values = self._get_table_entity_values(
@@ -1629,14 +1629,14 @@ def _read_from_online_store(
16291629
requested_features: List[str],
16301630
table: FeatureView,
16311631
) -> List[Tuple[List[Timestamp], List["FieldStatus.ValueType"], List[Value]]]:
1632-
""" Read and process data from the OnlineStore for a given FeatureView.
1632+
"""Read and process data from the OnlineStore for a given FeatureView.
16331633
1634-
This method guarantees that the order of the data in each element of the
1635-
List returned is the same as the order of `requested_features`.
1634+
This method guarantees that the order of the data in each element of the
1635+
List returned is the same as the order of `requested_features`.
16361636
1637-
This method assumes that `provider.online_read` returns data for each
1638-
combination of Entities in `entity_rows` in the same order as they
1639-
are provided.
1637+
This method assumes that `provider.online_read` returns data for each
1638+
combination of Entities in `entity_rows` in the same order as they
1639+
are provided.
16401640
"""
16411641
# Instantiate one EntityKeyProto per Entity.
16421642
entity_key_protos = [
@@ -1693,23 +1693,23 @@ def _populate_response_from_feature_data(
16931693
requested_features: Iterable[str],
16941694
table: FeatureView,
16951695
):
1696-
""" Populate the GetOnlineFeaturesResponse with feature data.
1697-
1698-
This method assumes that `_read_from_online_store` returns data for each
1699-
combination of Entities in `entity_rows` in the same order as they
1700-
are provided.
1701-
1702-
Args:
1703-
feature_data: A list of data in Protobuf form which was retrieved from the OnlineStore.
1704-
indexes: A list of indexes which should be the same length as `feature_data`. Each list
1705-
of indexes corresponds to a set of result rows in `online_features_response`.
1706-
online_features_response: The object to populate.
1707-
full_feature_names: A boolean that provides the option to add the feature view prefixes to the feature names,
1708-
changing them from the format "feature" to "feature_view__feature" (e.g., "daily_transactions" changes to
1709-
"customer_fv__daily_transactions").
1710-
requested_features: The names of the features in `feature_data`. This should be ordered in the same way as the
1711-
data in `feature_data`.
1712-
table: The FeatureView that `feature_data` was retrieved from.
1696+
"""Populate the GetOnlineFeaturesResponse with feature data.
1697+
1698+
This method assumes that `_read_from_online_store` returns data for each
1699+
combination of Entities in `entity_rows` in the same order as they
1700+
are provided.
1701+
1702+
Args:
1703+
feature_data: A list of data in Protobuf form which was retrieved from the OnlineStore.
1704+
indexes: A list of indexes which should be the same length as `feature_data`. Each list
1705+
of indexes corresponds to a set of result rows in `online_features_response`.
1706+
online_features_response: The object to populate.
1707+
full_feature_names: A boolean that provides the option to add the feature view prefixes to the feature names,
1708+
changing them from the format "feature" to "feature_view__feature" (e.g., "daily_transactions" changes to
1709+
"customer_fv__daily_transactions").
1710+
requested_features: The names of the features in `feature_data`. This should be ordered in the same way as the
1711+
data in `feature_data`.
1712+
table: The FeatureView that `feature_data` was retrieved from.
17131713
"""
17141714
# Add the feature names to the response.
17151715
requested_feature_refs = [
@@ -1973,7 +1973,7 @@ def _group_feature_refs(
19731973
List[Tuple[RequestFeatureView, List[str]]],
19741974
Set[str],
19751975
]:
1976-
""" Get list of feature views and corresponding feature names based on feature references"""
1976+
"""Get list of feature views and corresponding feature names based on feature references"""
19771977

19781978
# view name to view proto
19791979
view_index = {view.projection.name_to_use(): view for view in all_feature_views}
@@ -2046,7 +2046,7 @@ def _print_materialization_log(
20462046

20472047

20482048
def _validate_feature_views(feature_views: List[BaseFeatureView]):
2049-
""" Verify feature views have case-insensitively unique names"""
2049+
"""Verify feature views have case-insensitively unique names"""
20502050
fv_names = set()
20512051
for fv in feature_views:
20522052
case_insensitive_fv_name = fv.name.lower()
@@ -2061,7 +2061,7 @@ def _validate_feature_views(feature_views: List[BaseFeatureView]):
20612061

20622062

20632063
def _validate_data_sources(data_sources: List[DataSource]):
2064-
""" Verify data sources have case-insensitively unique names"""
2064+
"""Verify data sources have case-insensitively unique names"""
20652065
ds_names = set()
20662066
for fv in data_sources:
20672067
case_insensitive_ds_name = fv.name.lower()

sdk/python/feast/infra/offline_stores/bigquery.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@
5656

5757

5858
class BigQueryOfflineStoreConfig(FeastConfigBaseModel):
59-
""" Offline store config for GCP BigQuery """
59+
"""Offline store config for GCP BigQuery"""
6060

6161
type: Literal["bigquery"] = "bigquery"
6262
""" Offline store type selector"""

sdk/python/feast/infra/offline_stores/bigquery_source.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -27,20 +27,20 @@ def __init__(
2727
):
2828
"""Create a BigQuerySource from an existing table or query.
2929
30-
Args:
31-
table (optional): The BigQuery table where features can be found.
32-
table_ref (optional): (Deprecated) The BigQuery table where features can be found.
33-
event_timestamp_column: Event timestamp column used for point in time joins of feature values.
34-
created_timestamp_column (optional): Timestamp column when row was created, used for deduplicating rows.
35-
field_mapping: A dictionary mapping of column names in this data source to feature names in a feature table
36-
or view. Only used for feature columns, not entities or timestamp columns.
37-
date_partition_column (optional): Timestamp column used for partitioning.
38-
query (optional): SQL query to execute to generate data for this data source.
39-
name (optional): Name for the source. Defaults to the table_ref if not specified.
40-
Example:
41-
>>> from feast import BigQuerySource
42-
>>> my_bigquery_source = BigQuerySource(table="gcp_project:bq_dataset.bq_table")
43-
"""
30+
Args:
31+
table (optional): The BigQuery table where features can be found.
32+
table_ref (optional): (Deprecated) The BigQuery table where features can be found.
33+
event_timestamp_column: Event timestamp column used for point in time joins of feature values.
34+
created_timestamp_column (optional): Timestamp column when row was created, used for deduplicating rows.
35+
field_mapping: A dictionary mapping of column names in this data source to feature names in a feature table
36+
or view. Only used for feature columns, not entities or timestamp columns.
37+
date_partition_column (optional): Timestamp column used for partitioning.
38+
query (optional): SQL query to execute to generate data for this data source.
39+
name (optional): Name for the source. Defaults to the table_ref if not specified.
40+
Example:
41+
>>> from feast import BigQuerySource
42+
>>> my_bigquery_source = BigQuerySource(table="gcp_project:bq_dataset.bq_table")
43+
"""
4444
if table is None and table_ref is None and query is None:
4545
raise ValueError('No "table" or "query" argument provided.')
4646
if not table and table_ref:

sdk/python/feast/infra/offline_stores/file.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131

3232

3333
class FileOfflineStoreConfig(FeastConfigBaseModel):
34-
""" Offline store config for local (file-based) store """
34+
"""Offline store config for local (file-based) store"""
3535

3636
type: Literal["file"] = "file"
3737
""" Offline store type selector"""

sdk/python/feast/infra/offline_stores/redshift.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@
3939

4040

4141
class RedshiftOfflineStoreConfig(FeastConfigBaseModel):
42-
""" Offline store config for AWS Redshift """
42+
"""Offline store config for AWS Redshift"""
4343

4444
type: Literal["redshift"] = "redshift"
4545
""" Offline store type selector"""
@@ -341,7 +341,7 @@ def _to_arrow_internal(self) -> pa.Table:
341341

342342
@log_exceptions_and_usage
343343
def to_s3(self) -> str:
344-
""" Export dataset to S3 in Parquet format and return path """
344+
"""Export dataset to S3 in Parquet format and return path"""
345345
if self.on_demand_feature_views:
346346
transformed_df = self.to_df()
347347
aws_utils.upload_df_to_s3(self._s3_resource, self._s3_path, transformed_df)
@@ -361,7 +361,7 @@ def to_s3(self) -> str:
361361

362362
@log_exceptions_and_usage
363363
def to_redshift(self, table_name: str) -> None:
364-
""" Save dataset as a new Redshift table """
364+
"""Save dataset as a new Redshift table"""
365365
if self.on_demand_feature_views:
366366
transformed_df = self.to_df()
367367
aws_utils.upload_df_to_redshift(

sdk/python/feast/infra/offline_stores/snowflake.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@
5454

5555

5656
class SnowflakeOfflineStoreConfig(FeastConfigBaseModel):
57-
""" Offline store config for Snowflake """
57+
"""Offline store config for Snowflake"""
5858

5959
type: Literal["snowflake.offline"] = "snowflake.offline"
6060
""" Offline store type selector"""
@@ -336,7 +336,7 @@ def _to_arrow_internal(self) -> pa.Table:
336336
)
337337

338338
def to_snowflake(self, table_name: str) -> None:
339-
""" Save dataset as a new Snowflake table """
339+
"""Save dataset as a new Snowflake table"""
340340
if self.on_demand_feature_views is not None:
341341
transformed_df = self.to_df()
342342

sdk/python/feast/infra/online_stores/datastore.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555

5656

5757
class DatastoreOnlineStoreConfig(FeastConfigBaseModel):
58-
""" Online store config for GCP Datastore """
58+
"""Online store config for GCP Datastore"""
5959

6060
type: Literal["datastore"] = "datastore"
6161
""" Online store type selector"""

sdk/python/feast/infra/online_stores/sqlite.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737

3838

3939
class SqliteOnlineStoreConfig(FeastConfigBaseModel):
40-
""" Online store config for local (SQLite-based) store """
40+
"""Online store config for local (SQLite-based) store"""
4141

4242
type: Literal[
4343
"sqlite", "feast.infra.online_stores.sqlite.SqliteOnlineStore"

sdk/python/feast/infra/provider.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ def retrieve_saved_dataset(
183183
Returns:
184184
RetrievalJob object, which is lazy wrapper for actual query performed under the hood.
185185
186-
"""
186+
"""
187187
...
188188

189189
def get_feature_server_endpoint(self) -> Optional[str]:

sdk/python/feast/infra/utils/aws_utils.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ def execute_redshift_statement(
151151

152152

153153
def get_redshift_statement_result(redshift_data_client, statement_id: str) -> dict:
154-
""" Get the Redshift statement result """
154+
"""Get the Redshift statement result"""
155155
return redshift_data_client.get_statement_result(Id=statement_id)
156156

157157

@@ -306,7 +306,7 @@ def temporarily_upload_df_to_redshift(
306306

307307

308308
def download_s3_directory(s3_resource, bucket: str, key: str, local_dir: str):
309-
""" Download the S3 directory to a local disk """
309+
"""Download the S3 directory to a local disk"""
310310
bucket_obj = s3_resource.Bucket(bucket)
311311
if key != "" and not key.endswith("/"):
312312
key = key + "/"
@@ -318,7 +318,7 @@ def download_s3_directory(s3_resource, bucket: str, key: str, local_dir: str):
318318

319319

320320
def delete_s3_directory(s3_resource, bucket: str, key: str):
321-
""" Delete S3 directory recursively """
321+
"""Delete S3 directory recursively"""
322322
bucket_obj = s3_resource.Bucket(bucket)
323323
if key != "" and not key.endswith("/"):
324324
key = key + "/"
@@ -365,7 +365,7 @@ def unload_redshift_query_to_pa(
365365
iam_role: str,
366366
query: str,
367367
) -> pa.Table:
368-
""" Unload Redshift Query results to S3 and get the results in PyArrow Table format """
368+
"""Unload Redshift Query results to S3 and get the results in PyArrow Table format"""
369369
bucket, key = get_bucket_and_key(s3_path)
370370

371371
execute_redshift_query_and_unload_to_s3(
@@ -388,7 +388,7 @@ def unload_redshift_query_to_df(
388388
iam_role: str,
389389
query: str,
390390
) -> pd.DataFrame:
391-
""" Unload Redshift Query results to S3 and get the results in Pandas DataFrame format """
391+
"""Unload Redshift Query results to S3 and get the results in Pandas DataFrame format"""
392392
table = unload_redshift_query_to_pa(
393393
redshift_data_client,
394394
cluster_id,

0 commit comments

Comments (0)