diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 841f5da87b3..aef486d601a 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -151,6 +151,27 @@ jobs: name: wheels path: dist/* + # We add this step so the docker images can be built as part of the pre-release verification steps. + build-docker-images: + runs-on: ubuntu-latest + needs: get-version + strategy: + matrix: + component: [feature-server, feature-server-python-aws, feature-server-java, feature-transformation-server] + env: + REGISTRY: feastdev + steps: + - uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Build image + run: | + make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + env: + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} + verify-python-wheels: runs-on: ${{ matrix.os }} needs: [build-python-wheel, build-source-distribution, get-version] diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ec56d60e4c5..0c9ac1e752c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,6 +92,9 @@ jobs: uses: actions/setup-node@v2 with: node-version: '16' + - name: Set up Homebrew + id: set-up-homebrew + uses: Homebrew/actions/setup-homebrew@master - name: Setup Helm-docs run: | brew install norwoodj/tap/helm-docs diff --git a/CHANGELOG.md b/CHANGELOG.md index b657e9ddd1a..41512e4a49a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.25.1](https://github.com/feast-dev/feast/compare/v0.25.0...v0.25.1) (2022-09-30) + + +### Bug Fixes + +* Add `X-Trino-Extra-Credential` header and remove user override ([#3246](https://github.com/feast-dev/feast/issues/3246)) ([f38506c](https://github.com/feast-dev/feast/commit/f38506cf14a770496f622bf8103d52ff0b18ad12)) +* Add 
postgres to the feature server Dockerfile to fix helm chart flow ([#3261](https://github.com/feast-dev/feast/issues/3261)) ([a2cb995](https://github.com/feast-dev/feast/commit/a2cb995c8c854f721a06bc6404e69883334c9658)) +* Add stream feature view in the Web UI ([#3257](https://github.com/feast-dev/feast/issues/3257)) ([47d4c93](https://github.com/feast-dev/feast/commit/47d4c93f079e23ebcbb4143c3084213e5c87e57e)) +* Build dockerfile correctly ([#3239](https://github.com/feast-dev/feast/issues/3239)) ([a2dc0d0](https://github.com/feast-dev/feast/commit/a2dc0d0410eb297afddfb1dd4f1f899ab70fa14f)) +* Configuration to stop coercion of tz for entity_df ([#3255](https://github.com/feast-dev/feast/issues/3255)) ([fdc8d67](https://github.com/feast-dev/feast/commit/fdc8d67a0f16dd05b49cbd0d7ef24b197785e1b8)) +* Enable users to upgrade a batch source into a push source ([#3213](https://github.com/feast-dev/feast/issues/3213)) ([8f2fb58](https://github.com/feast-dev/feast/commit/8f2fb587c59a661b121b168f191dbe2c95aaf1ee)) +* Return 422 on bad push source name ([#3214](https://github.com/feast-dev/feast/issues/3214)) ([8abbcd9](https://github.com/feast-dev/feast/commit/8abbcd9960bbd0d5db9f14cc3c2456bae497b58b)) +* Stream feature view not shown in the UI ([#3251](https://github.com/feast-dev/feast/issues/3251)) ([55e28e2](https://github.com/feast-dev/feast/commit/55e28e2f831dfe1129a02fe595d58e4b9b94f0da)) +* Update snowflake materialization messages ([#3230](https://github.com/feast-dev/feast/issues/3230)) ([a63d440](https://github.com/feast-dev/feast/commit/a63d440e4207c1e360630423bcda2c329673fddd)) +* Use configured user in env var instead of "user" for Trino ([#3254](https://github.com/feast-dev/feast/issues/3254)) ([e7ed3d5](https://github.com/feast-dev/feast/commit/e7ed3d5b9a4d27d6352bbaf1cc24d975886752b3)) + # [0.25.0](https://github.com/feast-dev/feast/compare/v0.24.0...v0.25.0) (2022-09-20) diff --git a/Makefile b/Makefile index 0bf0c82669f..fabcb388e84 100644 --- a/Makefile 
+++ b/Makefile @@ -63,11 +63,9 @@ benchmark-python-local: FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests test-python: - @(docker info > /dev/null 2>&1 && \ - FEAST_USAGE=False \ - IS_TEST=True \ - python -m pytest -n 8 sdk/python/tests \ - ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; + FEAST_USAGE=False \ + IS_TEST=True \ + python -m pytest -n 8 sdk/python/tests \ test-python-integration: FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests diff --git a/OWNERS b/OWNERS index bc7342c3d61..c34fd6baafe 100644 --- a/OWNERS +++ b/OWNERS @@ -3,16 +3,22 @@ # More info at https://www.kubernetes.dev/docs/guide/owners/ approvers: - woop - - tsotnet - achals - adchia - felixwang9817 - mavysavydav - MattDelac - kevjumba + - chhabrakadabra + - sfc-gh-madkins + - zhilingc + - whoahbot + - niklasvm + - toping4445 + - DvirDukhan + - hemidactylus reviewers: - woop - - tsotnet - achals - tedhtchang - adchia @@ -20,3 +26,12 @@ reviewers: - mavysavydav - MattDelac - kevjumba + - chhabrakadabra + - sfc-gh-madkins + - zhilingc + - whoahbot + - niklasvm + - toping4445 + - DvirDukhan + - hemidactylus + \ No newline at end of file diff --git a/README.md b/README.md index c8adfa5f22c..182637018f1 100644 --- a/README.md +++ b/README.md @@ -193,7 +193,8 @@ The list below contains the functionality that contributors are planning to deve * **Feature Serving** * [x] Python Client * [x] [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) - * [x] [Go feature server](https://docs.feast.dev/reference/feature-servers/go-feature-server) + * [x] [Java feature server (alpha)](https://github.com/feast-dev/feast/blob/master/infra/charts/feast/README.md) + * [x] [Go feature server (alpha)](https://docs.feast.dev/reference/feature-servers/go-feature-server) * 
**Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) * **Feature Discovery and Governance** diff --git a/docs/getting-started/concepts/feature-retrieval.md b/docs/getting-started/concepts/feature-retrieval.md index fd216fc71f5..867e17848b0 100644 --- a/docs/getting-started/concepts/feature-retrieval.md +++ b/docs/getting-started/concepts/feature-retrieval.md @@ -180,7 +180,7 @@ Feast accepts either: - [feature services](feature-retrieval.md#feature-services), which group features needed for a model version - [feature references](feature-retrieval.md#feature-references) -### Example: querying a feature service (recommended) +#### Example: querying a feature service (recommended) ```python training_df = store.get_historical_features( entity_df=entity_df, @@ -188,7 +188,7 @@ training_df = store.get_historical_features( ).to_df() ``` -### Example: querying a list of feature references +#### Example: querying a list of feature references ```python training_df = store.get_historical_features( entity_df=entity_df, @@ -204,7 +204,7 @@ Feast accepts either a **Pandas dataframe** as the entity dataframe (including e Both approaches must specify the full **entity key** needed as well as the **timestamps**. Feast then joins features onto this dataframe. -### Example: entity dataframe for generating training data +#### Example: entity dataframe for generating training data ```python entity_df = pd.DataFrame.from_dict( { @@ -228,7 +228,7 @@ training_df = store.get_historical_features( ).to_df() ``` -### Example: entity SQL query for generating training data +#### Example: entity SQL query for generating training data You can also pass a SQL string to generate the above dataframe. This is useful for getting all entities in a timeframe from some data source. 
```python diff --git a/docs/project/release-process.md b/docs/project/release-process.md index 2ddc697730e..0aa7d3fb5ba 100644 --- a/docs/project/release-process.md +++ b/docs/project/release-process.md @@ -19,8 +19,9 @@ After this step, you will have all the changes you need in the branch. ### 2. Pre-release verification A lot of things can go wrong. One of the most common is getting the wheels to build correctly (and not accidentally building dev wheels from improper tagging or local code changes during the release process). +Another possible failure is that the Docker images might not build correctly. -We verify the wheels building in **your fork** of Feast, not the main feast-dev/feast repo. +We verify the building of the wheels and Docker images in **your fork** of Feast, not the main feast-dev/feast repo. #### For minor releases (e.g. v0.22.0) 1. Merge upstream master changes into your **fork**. Make sure you are running the workflow off of your fork! @@ -30,7 +31,7 @@ We verify the wheels building in **your fork** of Feast, not the main feast-dev/ > This is important. If you don't have a tag, then the wheels you build will be **dev wheels**, which we can't > push. The release process will automatically produce a tag for you via Semantic Release. 3. Access the `Actions` tab on your GitHub UI on your fork and click the `build_wheels` action. This workflow will - build the python sdk wheels for Python 3.8-3.10 on MacOS 10.15 and Linux and verify that these wheels are correct. + build the python sdk wheels for Python 3.8-3.10 on MacOS 10.15 and Linux and verify that these wheels are correct. It will also build the Docker images. The publish workflow uses this action to publish the python wheels for a new release to PyPI. 4. Look for the header `This workflow has a workflow_dispatch event trigger` and click `Run Workflow` on the right. 5. 
Run the workflow off of the tag you just created(`v0.22.0` in this case, **not** the master branch) and verify that diff --git a/docs/reference/offline-stores/bigquery.md b/docs/reference/offline-stores/bigquery.md index 0e286d78c49..b7607abf595 100644 --- a/docs/reference/offline-stores/bigquery.md +++ b/docs/reference/offline-stores/bigquery.md @@ -7,6 +7,9 @@ The BigQuery offline store provides support for reading [BigQuerySources](../dat * All joins happen within BigQuery. * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. A Pandas dataframes will be uploaded to BigQuery as a table (marked for expiration) in order to complete join operations. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[gcp]'`. You can get started by then running `feast init -t gcp`. + ## Example {% code title="feature_store.yaml" %} diff --git a/docs/reference/offline-stores/mssql.md b/docs/reference/offline-stores/mssql.md index bec0c8deb82..e352b3dd2aa 100644 --- a/docs/reference/offline-stores/mssql.md +++ b/docs/reference/offline-stores/mssql.md @@ -6,6 +6,9 @@ The MsSQL offline store provides support for reading [MsSQL Sources](../data-sou * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[azure]'`. You can get started by then following this [tutorial](https://github.com/feast-dev/feast/blob/master/docs/tutorials/azure/README.md). + ## Disclaimer The MsSQL offline store does not achieve full test coverage. @@ -34,26 +37,26 @@ offline_store: The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Spark offline store. 
-| | MsSql | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | no | -| `write_logged_features` (persist logged features to offline store) | no | +| | MsSql | +| :----------------------------------------------------------------- | :---- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | no | +| `write_logged_features` (persist logged features to offline store) | no | Below is a matrix indicating which functionality is supported by `MsSqlServerRetrievalJob`. -| | MsSql | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | no | -| export to SQL | no | -| export to data lake (S3, GCS, etc.) | no | -| export to data warehouse | no | -| local execution of Python-based on-demand transforms | no | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | yes | +| | MsSql | +| ----------------------------------------------------- | ----- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | no | +| export to SQL | no | +| export to data lake (S3, GCS, etc.) 
| no | +| export to data warehouse | no | +| local execution of Python-based on-demand transforms | no | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/offline-stores/postgres.md b/docs/reference/offline-stores/postgres.md index 506666fc378..094ab4885f4 100644 --- a/docs/reference/offline-stores/postgres.md +++ b/docs/reference/offline-stores/postgres.md @@ -10,6 +10,9 @@ The PostgreSQL offline store provides support for reading [PostgreSQLSources](.. The PostgreSQL offline store does not achieve full test coverage. Please do not assume complete stability. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[postgres]'`. You can get started by then running `feast init -t postgres`. + ## Example {% code title="feature_store.yaml" %} @@ -42,29 +45,29 @@ The full set of configuration options is available in [PostgreSQLOfflineStoreCon The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the PostgreSQL offline store. 
-| | Postgres | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | no | -| `write_logged_features` (persist logged features to offline store) | no | +| | Postgres | +| :----------------------------------------------------------------- | :------- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | no | +| `write_logged_features` (persist logged features to offline store) | no | Below is a matrix indicating which functionality is supported by `PostgreSQLRetrievalJob`. -| | Postgres | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | no | -| export to SQL | yes | -| export to data lake (S3, GCS, etc.) | yes | -| export to data warehouse | yes | -| export as Spark dataframe | no | -| local execution of Python-based on-demand transforms | yes | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | yes | -| preview the query plan before execution | yes | -| read partitioned data | yes | +| | Postgres | +| ----------------------------------------------------- | -------- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | no | +| export to SQL | yes | +| export to data lake (S3, GCS, etc.) 
| yes | +| export to data warehouse | yes | +| export as Spark dataframe | no | +| local execution of Python-based on-demand transforms | yes | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | yes | +| preview the query plan before execution | yes | +| read partitioned data | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/offline-stores/redshift.md b/docs/reference/offline-stores/redshift.md index 2cdf49bdb9f..98092c44bec 100644 --- a/docs/reference/offline-stores/redshift.md +++ b/docs/reference/offline-stores/redshift.md @@ -7,6 +7,9 @@ The Redshift offline store provides support for reading [RedshiftSources](../dat * All joins happen within Redshift. * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. A Pandas dataframes will be uploaded to Redshift temporarily in order to complete join operations. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[aws]'`. You can get started by then running `feast init -t aws`. + ## Example {% code title="feature_store.yaml" %} @@ -32,30 +35,30 @@ The full set of configuration options is available in [RedshiftOfflineStoreConfi The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Redshift offline store. 
-| | Redshift | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | yes | -| `write_logged_features` (persist logged features to offline store) | yes | +| | Redshift | +| :----------------------------------------------------------------- | :------- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | yes | +| `write_logged_features` (persist logged features to offline store) | yes | Below is a matrix indicating which functionality is supported by `RedshiftRetrievalJob`. -| | Redshift | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | yes | -| export to SQL | yes | -| export to data lake (S3, GCS, etc.) | no | -| export to data warehouse | yes | -| export as Spark dataframe | no | -| local execution of Python-based on-demand transforms | yes | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | yes | -| preview the query plan before execution | yes | -| read partitioned data | yes | +| | Redshift | +| ----------------------------------------------------- | -------- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | yes | +| export to SQL | yes | +| export to data lake (S3, GCS, etc.) 
| no | +| export to data warehouse | yes | +| export as Spark dataframe | no | +| local execution of Python-based on-demand transforms | yes | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | yes | +| preview the query plan before execution | yes | +| read partitioned data | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md index e40ad7cd7a2..4ac7f164363 100644 --- a/docs/reference/offline-stores/snowflake.md +++ b/docs/reference/offline-stores/snowflake.md @@ -6,6 +6,13 @@ The [Snowflake](https://trial.snowflake.com) offline store provides support for * All joins happen within Snowflake. * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. A Pandas dataframes will be uploaded to Snowflake as a temporary table in order to complete join operations. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[snowflake]'`. + +If you're using a file based registry, then you'll also need to install the relevant cloud extra (`pip install 'feast[snowflake, CLOUD]'` where `CLOUD` is one of `aws`, `gcp`, `azure`) + +You can get started by then running `feast init -t snowflake`. + ## Example {% code title="feature_store.yaml" %} @@ -31,29 +38,29 @@ The full set of configuration options is available in [SnowflakeOfflineStoreConf The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Snowflake offline store. 
-| | Snowflake | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | yes | -| `write_logged_features` (persist logged features to offline store) | yes | +| | Snowflake | +| :----------------------------------------------------------------- | :-------- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | yes | +| `write_logged_features` (persist logged features to offline store) | yes | Below is a matrix indicating which functionality is supported by `SnowflakeRetrievalJob`. -| | Snowflake | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | no | -| export to SQL | yes | -| export to data lake (S3, GCS, etc.) | yes | -| export to data warehouse | yes | -| export as Spark dataframe | no | -| local execution of Python-based on-demand transforms | yes | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | yes | -| preview the query plan before execution | yes | -| read partitioned data | yes | +| | Snowflake | +| ----------------------------------------------------- | --------- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | no | +| export to SQL | yes | +| export to data lake (S3, GCS, etc.) 
| yes | +| export to data warehouse | yes | +| export as Spark dataframe | no | +| local execution of Python-based on-demand transforms | yes | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | yes | +| preview the query plan before execution | yes | +| read partitioned data | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/offline-stores/spark.md b/docs/reference/offline-stores/spark.md index f1ef1300bd7..ae5ea78071e 100644 --- a/docs/reference/offline-stores/spark.md +++ b/docs/reference/offline-stores/spark.md @@ -11,6 +11,9 @@ The Spark offline store provides support for reading [SparkSources](../data-sour The Spark offline store does not achieve full test coverage. Please do not assume complete stability. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[spark]'`. You can get started by then running `feast init -t spark`. + ## Example {% code title="feature_store.yaml" %} @@ -39,29 +42,29 @@ The full set of configuration options is available in [SparkOfflineStoreConfig]( The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Spark offline store. 
-| | Spark | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | no | -| `write_logged_features` (persist logged features to offline store) | no | +| | Spark | +| :----------------------------------------------------------------- | :---- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | no | +| `write_logged_features` (persist logged features to offline store) | no | Below is a matrix indicating which functionality is supported by `SparkRetrievalJob`. -| | Spark | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | no | -| export to SQL | no | -| export to data lake (S3, GCS, etc.) | no | -| export to data warehouse | no | -| export as Spark dataframe | yes | -| local execution of Python-based on-demand transforms | no | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | yes | -| preview the query plan before execution | yes | -| read partitioned data | yes | +| | Spark | +| ----------------------------------------------------- | ----- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | no | +| export to SQL | no | +| export to data lake (S3, GCS, etc.) 
| no | +| export to data warehouse | no | +| export as Spark dataframe | yes | +| local execution of Python-based on-demand transforms | no | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | yes | +| preview the query plan before execution | yes | +| read partitioned data | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/offline-stores/trino.md b/docs/reference/offline-stores/trino.md index 8cc604248ff..446db620e32 100644 --- a/docs/reference/offline-stores/trino.md +++ b/docs/reference/offline-stores/trino.md @@ -10,6 +10,9 @@ The Trino offline store provides support for reading [TrinoSources](../data-sour The Trino offline store does not achieve full test coverage. Please do not assume complete stability. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[trino]'`. You can then run `feast init`, then swap out `feature_store.yaml` with the below example to connect to Trino. + ## Example {% code title="feature_store.yaml" %} @@ -36,29 +39,29 @@ The full set of configuration options is available in [TrinoOfflineStoreConfig]( The set of functionality supported by offline stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Trino offline store. 
-| | Trino | -| :-------------------------------- | :-- | -| `get_historical_features` (point-in-time correct join) | yes | -| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | -| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | -| `offline_write_batch` (persist dataframes to offline store) | no | -| `write_logged_features` (persist logged features to offline store) | no | +| | Trino | +| :----------------------------------------------------------------- | :---- | +| `get_historical_features` (point-in-time correct join) | yes | +| `pull_latest_from_table_or_query` (retrieve latest feature values) | yes | +| `pull_all_from_table_or_query` (retrieve a saved dataset) | yes | +| `offline_write_batch` (persist dataframes to offline store) | no | +| `write_logged_features` (persist logged features to offline store) | no | Below is a matrix indicating which functionality is supported by `TrinoRetrievalJob`. -| | Trino | -| --------------------------------- | --- | -| export to dataframe | yes | -| export to arrow table | yes | -| export to arrow batches | no | -| export to SQL | yes | -| export to data lake (S3, GCS, etc.) | no | -| export to data warehouse | no | -| export as Spark dataframe | no | -| local execution of Python-based on-demand transforms | yes | -| remote execution of Python-based on-demand transforms | no | -| persist results in the offline store | no | -| preview the query plan before execution | yes | -| read partitioned data | yes | +| | Trino | +| ----------------------------------------------------- | ----- | +| export to dataframe | yes | +| export to arrow table | yes | +| export to arrow batches | no | +| export to SQL | yes | +| export to data lake (S3, GCS, etc.) 
| no | +| export to data warehouse | no | +| export as Spark dataframe | no | +| local execution of Python-based on-demand transforms | yes | +| remote execution of Python-based on-demand transforms | no | +| persist results in the offline store | no | +| preview the query plan before execution | yes | +| read partitioned data | yes | To compare this set of functionality against other offline stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/cassandra.md b/docs/reference/online-stores/cassandra.md index 48b7b73f439..e012ad250ae 100644 --- a/docs/reference/online-stores/cassandra.md +++ b/docs/reference/online-stores/cassandra.md @@ -8,7 +8,8 @@ The [Cassandra / Astra DB] online store provides support for materializing featu * Each feature view is mapped one-to-one to a specific Cassandra table * This implementation inherits all strengths of Cassandra such as high availability, fault-tolerance, and data distribution -An easy way to get started is the command `feast init REPO_NAME -t cassandra`. +## Getting started +In order to use this online store, you'll need to run `pip install 'feast[cassandra]'`. You can then get started with the command `feast init REPO_NAME -t cassandra`. ### Example (Cassandra) @@ -66,23 +67,23 @@ Storage specifications can be found at `docs/specs/online_store_format.md`. The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Cassandra online store. -| | Cassandra | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. 
tables) in the online store | yes | -| generate a plan of infrastructure changes | yes | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | no | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | yes | -| collocated by feature service | no | -| collocated by entity key | no | +| | Cassandra | +| :-------------------------------------------------------- | :-------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | yes | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/datastore.md b/docs/reference/online-stores/datastore.md index 0867853f15d..761d246ba7e 100644 --- a/docs/reference/online-stores/datastore.md +++ b/docs/reference/online-stores/datastore.md @@ -4,6 +4,9 @@ The [Datastore](https://cloud.google.com/datastore) online store provides support for materializing feature values into Cloud Datastore. 
The data model used to store feature values in Datastore is described in more detail [here](../../specs/online_store_format.md#google-datastore-online-store-format). +## Getting started +In order to use this online store, you'll need to run `pip install 'feast[gcp]'`. You can then get started with the command `feast init REPO_NAME -t gcp`. + ## Example {% code title="feature_store.yaml" %} @@ -25,23 +28,23 @@ The full set of configuration options is available in [DatastoreOnlineStoreConfi The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Datastore online store. -| | Datastore | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | no | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | yes | -| collocated by feature service | no | -| collocated by entity key | no | +| | Datastore | +| :-------------------------------------------------------- | :-------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. 
tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/dynamodb.md b/docs/reference/online-stores/dynamodb.md index 2f94c768199..344caccac1d 100644 --- a/docs/reference/online-stores/dynamodb.md +++ b/docs/reference/online-stores/dynamodb.md @@ -4,6 +4,9 @@ The [DynamoDB](https://aws.amazon.com/dynamodb/) online store provides support for materializing feature values into AWS DynamoDB. +## Getting started +In order to use this online store, you'll need to run `pip install 'feast[aws]'`. You can then get started with the command `feast init REPO_NAME -t aws`. + ## Example {% code title="feature_store.yaml" %} @@ -27,7 +30,7 @@ Feast requires the following permissions in order to execute commands for Dynamo | ----------------------- | ----------------------------------------------------------------------------------- | ------------------------------------------------- | | **Apply** |

dynamodb:CreateTable

dynamodb:DescribeTable

dynamodb:DeleteTable

| arn:aws:dynamodb:\:\:table/\* | | **Materialize** | dynamodb.BatchWriteItem | arn:aws:dynamodb:\:\:table/\* | -| **Get Online Features** | dynamodb.BatchGetItem | arn:aws:dynamodb:\:\:table/\* | +| **Get Online Features** | dynamodb.BatchGetItem | arn:aws:dynamodb:\:\:table/\* | The following inline policy can be used to grant Feast the necessary permissions: @@ -59,23 +62,23 @@ Lastly, this IAM role needs to be associated with the desired Redshift cluster. The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the DynamoDB online store. -| | DynamoDB | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | no | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | yes | -| collocated by feature service | no | -| collocated by entity key | no | +| | DynamoDB | +| :-------------------------------------------------------- | :------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. 
tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/postgres.md b/docs/reference/online-stores/postgres.md index 083c0006359..3885867dd26 100644 --- a/docs/reference/online-stores/postgres.md +++ b/docs/reference/online-stores/postgres.md @@ -8,6 +8,9 @@ The PostgreSQL online store provides support for materializing feature values in * sslmode, sslkey_path, sslcert_path, and sslrootcert_path are optional +## Getting started +In order to use this online store, you'll need to run `pip install 'feast[postgres]'`. You can get started by then running `feast init -t postgres`. + ## Example {% code title="feature_store.yaml" %} @@ -37,23 +40,23 @@ The full set of configuration options is available in [PostgreSQLOnlineStoreConf The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Postgres online store. -| | Postgres | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. 
tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | no | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | yes | -| collocated by feature service | no | -| collocated by entity key | no | +| | Postgres | +| :-------------------------------------------------------- | :------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/redis.md b/docs/reference/online-stores/redis.md index 80e90348c55..2078ee16b95 100644 --- a/docs/reference/online-stores/redis.md +++ b/docs/reference/online-stores/redis.md @@ -7,6 +7,15 @@ The [Redis](https://redis.io) online store provides support for materializing fe * Both Redis and Redis Cluster are supported. * The data model used to store feature values in Redis is described in more detail [here](../../specs/online\_store\_format.md). 
+## Getting started +In order to use this online store, you'll need to install the redis extra (along with the dependency needed for the offline store of choice). E.g. +- `pip install 'feast[gcp, redis]'` +- `pip install 'feast[snowflake, redis]'` +- `pip install 'feast[aws, redis]'` +- `pip install 'feast[azure, redis]'` + +You can get started by using any of the other templates (e.g. `feast init -t gcp` or `feast init -t snowflake` or `feast init -t aws`), and then swapping in Redis as the online store as seen below in the examples. + ## Examples Connecting to a single Redis instance: @@ -43,23 +52,23 @@ The full set of configuration options is available in [RedisOnlineStoreConfig](h The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Redis online store. -| | Redis | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | yes | -| readable by Go | yes | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | yes | -| support for ttl (time to live) at retrieval | yes | -| support for deleting expired data | yes | -| collocated by feature view | no | -| collocated by feature service | no | -| collocated by entity key | yes | +| | Redis | +| :-------------------------------------------------------- | :---- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. 
tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | yes | +| readable by Go | yes | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | yes | +| support for ttl (time to live) at retrieval | yes | +| support for deleting expired data | yes | +| collocated by feature view | no | +| collocated by feature service | no | +| collocated by entity key | yes | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/snowflake.md b/docs/reference/online-stores/snowflake.md index d114c87144a..6b6d107285c 100644 --- a/docs/reference/online-stores/snowflake.md +++ b/docs/reference/online-stores/snowflake.md @@ -16,6 +16,9 @@ The data model for using a Snowflake Transient Table as an online store follows (This model may be subject to change when Snowflake Hybrid Tables are released) +## Getting started +In order to use this online store, you'll need to run `pip install 'feast[snowflake]'`. You can then get started with the command `feast init REPO_NAME -t snowflake`. + ## Example {% code title="feature_store.yaml" %} ```yaml @@ -53,23 +56,23 @@ The full set of configuration options is available in [SnowflakeOnlineStoreConfi The set of functionality supported by online stores is described in detail [here](overview.md#functionality). Below is a matrix indicating which functionality is supported by the Snowflake online store. -| | Snowflake | -| :-------------------------------------------------------- | :-- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. 
tables) in the online store | yes | -| teardown infrastructure (e.g. tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | no | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | yes | -| collocated by feature service | no | -| collocated by entity key | no | +| | Snowflake | +| :-------------------------------------------------------- | :-------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/providers/amazon-web-services.md b/docs/reference/providers/amazon-web-services.md index 3135fedb740..68956a1be93 100644 --- a/docs/reference/providers/amazon-web-services.md +++ b/docs/reference/providers/amazon-web-services.md @@ -5,6 +5,11 @@ * Offline Store: Uses the **Redshift** offline store by default. Also supports File as the offline store. 
* Online Store: Uses the **DynamoDB** online store by default. Also supports Sqlite as an online store. +## Getting started +In order to use this offline store, you'll need to run (Snowflake) `pip install 'feast[aws, snowflake]'` or (Redshift) `pip install 'feast[aws]'`. + +You can get started by then running `feast init -t snowflake` or `feast init -t aws`. + ## Example {% code title="feature_store.yaml" %} diff --git a/docs/reference/providers/azure.md b/docs/reference/providers/azure.md index 123bf087635..0e7206f076e 100644 --- a/docs/reference/providers/azure.md +++ b/docs/reference/providers/azure.md @@ -10,6 +10,9 @@ The Azure provider does not achieve full test coverage. Please do not assume complete stability. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[azure]'`. You can get started by then following this [tutorial](https://github.com/feast-dev/feast/blob/master/docs/tutorials/azure/README.md). + ## Example {% code title="feature_store.yaml" %} diff --git a/docs/reference/providers/google-cloud-platform.md b/docs/reference/providers/google-cloud-platform.md index 713313d16be..96af3b6b2ff 100644 --- a/docs/reference/providers/google-cloud-platform.md +++ b/docs/reference/providers/google-cloud-platform.md @@ -5,6 +5,9 @@ * Offline Store: Uses the **BigQuery** offline store by default. Also supports File as the offline store. * Online Store: Uses the **Datastore** online store by default. Also supports Sqlite as an online store. +## Getting started +In order to use this offline store, you'll need to run `pip install 'feast[gcp]'`. You can get started by then running `feast init -t gcp`. 
+ ## Example {% code title="feature_store.yaml" %} diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml index 7095866e4f6..6d8744dca23 100644 --- a/infra/charts/feast-feature-server/Chart.yaml +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-feature-server description: Feast Feature Server in Go or Python type: application -version: 0.25.0 +version: 0.25.1 keywords: - machine learning - big data diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md index 9cf5c31a631..41fd4c62371 100644 --- a/infra/charts/feast-feature-server/README.md +++ b/infra/charts/feast-feature-server/README.md @@ -1,6 +1,6 @@ # Feast Python / Go Feature Server Helm Charts -Current chart version is `0.25.0` +Current chart version is `0.25.1` ## Installation @@ -30,7 +30,7 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d | fullnameOverride | string | `""` | | | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"feastdev/feature-server"` | Docker image for Feature Server repository | -| image.tag | string | `"0.25.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | +| image.tag | string | `"0.25.1"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | | imagePullSecrets | list | `[]` | | | livenessProbe.initialDelaySeconds | int | `30` | | | livenessProbe.periodSeconds | int | `30` | | diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 90954a23cb1..782d219630d 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -9,7 +9,7 @@ image: repository: feastdev/feature-server pullPolicy: IfNotPresent # image.tag -- The Docker image tag (can be overwritten 
if custom feature server deps are needed for on demand transforms) - tag: 0.25.0 + tag: 0.25.1 imagePullSecrets: [] nameOverride: "" diff --git a/infra/charts/feast-python-server/Chart.yaml b/infra/charts/feast-python-server/Chart.yaml index 30d90876182..7e1be9ceb14 100644 --- a/infra/charts/feast-python-server/Chart.yaml +++ b/infra/charts/feast-python-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-python-server description: Feast Feature Server in Python type: application -version: 0.25.0 +version: 0.25.1 keywords: - machine learning - big data diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md index c5d5393e29f..20829e6d789 100644 --- a/infra/charts/feast-python-server/README.md +++ b/infra/charts/feast-python-server/README.md @@ -2,7 +2,7 @@ > Note: this helm chart is deprecated in favor of [feast-feature-server](../feast-feature-server/README.md) -Current chart version is `0.25.0` +Current chart version is `0.25.1` ## Installation Docker repository and tag are required. 
Helm install example: diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index 02f689f5419..2bd45f334f2 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.25.0 +version: 0.25.1 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index 2afdaf645cc..592d5f053fb 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast Java components that are being installe ## Chart: Feast -Feature store for machine learning Current chart version is `0.25.0` +Feature store for machine learning Current chart version is `0.25.1` ## Installation @@ -65,8 +65,8 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/java-demo) fo | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.25.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.25.0 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.25.1 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.25.1 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index bdaa9ea1fcc..fcb4444b819 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: feature-server -version: 0.25.0 -appVersion: v0.25.0 +version: 0.25.1 +appVersion: v0.25.1 keywords: - machine learning - big data diff --git 
a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index aef8c0329a6..c4703c3a179 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.25.0](https://img.shields.io/badge/Version-0.25.0-informational?style=flat-square) ![AppVersion: v0.25.0](https://img.shields.io/badge/AppVersion-v0.25.0-informational?style=flat-square) +![Version: 0.25.1](https://img.shields.io/badge/Version-0.25.1-informational?style=flat-square) ![AppVersion: v0.25.1](https://img.shields.io/badge/AppVersion-v0.25.1-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.25.0"` | Image tag | +| image.tag | string | `"0.25.1"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index d3b8a33a645..35380787bc6 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: feastdev/feature-server-java # image.tag -- Image tag - tag: 0.25.0 + tag: 0.25.1 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git 
a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 104b2f24278..878ed7e0380 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.25.0 -appVersion: v0.25.0 +version: 0.25.1 +appVersion: v0.25.1 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 37be5b0f106..fea87a55e0b 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.25.0](https://img.shields.io/badge/Version-0.25.0-informational?style=flat-square) ![AppVersion: v0.25.0](https://img.shields.io/badge/AppVersion-v0.25.0-informational?style=flat-square) +![Version: 0.25.1](https://img.shields.io/badge/Version-0.25.1-informational?style=flat-square) ![AppVersion: v0.25.1](https://img.shields.io/badge/AppVersion-v0.25.1-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.25.0"` | Image tag | +| image.tag | string | `"0.25.1"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git 
a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 5232ac68ca1..1bf259ec187 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.25.0 + tag: 0.25.1 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index a599ac23a44..20d89e3669a 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.25.0 + version: 0.25.1 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.25.0 + version: 0.25.1 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/java/infra/docker/feature-server/Dockerfile b/java/infra/docker/feature-server/Dockerfile index bf4e172f763..8d246ed86aa 100644 --- a/java/infra/docker/feature-server/Dockerfile +++ b/java/infra/docker/feature-server/Dockerfile @@ -12,13 +12,6 @@ COPY java/serving/pom.xml serving/pom.xml COPY java/serving-client/pom.xml serving-client/pom.xml COPY java/coverage/pom.xml coverage/pom.xml -# Setting Maven repository .m2 directory relative to /build folder gives the -# user to optionally use cached repository when building the image by copying -# the existing .m2 directory to $FEAST_REPO_ROOT/.m2 -ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=3" -COPY 
java/pom.xml .m2/* .m2/ -RUN mvn dependency:go-offline -DexcludeGroupIds:dev.feast 2>/dev/null || true - COPY java/ . COPY protos/feast datatypes/src/main/proto/feast diff --git a/java/pom.xml b/java/pom.xml index 874daa27984..aa6157a14ff 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -35,7 +35,7 @@ - 0.25.0 + 0.25.1 https://github.com/feast-dev/feast UTF-8 diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 19a780b32ca..54a68ed0480 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -760,7 +760,7 @@ def __init__( def __eq__(self, other): if not isinstance(other, PushSource): - raise TypeError("Comparisons should only involve PushSource class objects.") + return False if not super().__eq__(other): return False diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 834df0e5c48..15ba86781df 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -398,3 +398,8 @@ def __init__(self): super().__init__( "The entity dataframe specified does not have the timestamp field as a datetime." 
) + + +class PushSourceNotFoundException(Exception): + def __init__(self, push_source_name: str): + super().__init__(f"Unable to find push source '{push_source_name}'.") diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index c2596d411c6..7b0cfc4bed8 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -13,6 +13,7 @@ import feast from feast import proto_json from feast.data_source import PushMode +from feast.errors import PushSourceNotFoundException from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest @@ -98,6 +99,11 @@ def push(body=Depends(get_body)): allow_registry_cache=request.allow_registry_cache, to=to, ) + except PushSourceNotFoundException as e: + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + # Raise HTTPException to return the error message to the client + raise HTTPException(status_code=422, detail=str(e)) except Exception as e: # Print the original exception on the server side logger.exception(traceback.format_exc()) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 9350220c21c..c56f73023c4 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -59,6 +59,7 @@ EntityNotFoundException, FeatureNameCollisionError, FeatureViewNotFoundException, + PushSourceNotFoundException, RequestDataNotFoundInEntityDfException, RequestDataNotFoundInEntityRowsException, ) @@ -1111,7 +1112,8 @@ def get_historical_features( # Check that the right request data is present in the entity_df if type(entity_df) == pd.DataFrame: - entity_df = utils.make_df_tzaware(cast(pd.DataFrame, entity_df)) + if self.config.coerce_tz_aware: + entity_df = utils.make_df_tzaware(cast(pd.DataFrame, entity_df)) for fv in request_feature_views: for feature in fv.features: if feature.name not in entity_df.columns: @@ -1444,6 +1446,9 @@ def push( ) } + if not 
fvs_with_push_sources: + raise PushSourceNotFoundException(push_source_name) + for fv in fvs_with_push_sources: if to == PushMode.ONLINE or to == PushMode.ONLINE_AND_OFFLINE: self.write_to_online_store( diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index 751a398ad4f..990c1fd8f03 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -9,8 +9,8 @@ RUN apt update && \ RUN pip install pip --upgrade COPY . . +RUN pip install ".[aws,gcp,snowflake,redis,go,mysql,postgres]" -RUN pip install -r requirements.txt RUN apt update RUN apt install -y -V ca-certificates lsb-release wget RUN wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev index 751a398ad4f..990c1fd8f03 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev @@ -9,8 +9,8 @@ RUN apt update && \ RUN pip install pip --upgrade COPY . . 
+RUN pip install ".[aws,gcp,snowflake,redis,go,mysql,postgres]" -RUN pip install -r requirements.txt RUN apt update RUN apt install -y -V ca-certificates lsb-release wget RUN wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb diff --git a/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt b/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt deleted file mode 100644 index 01d08a4effa..00000000000 --- a/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -feast[aws,gcp,snowflake,redis,go,mysql] diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 0219a7923f6..b523413bcde 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -118,11 +118,6 @@ def update( entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], ): - click.echo( - f"Deploying materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL}" - ) - click.echo() - stage_context = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"' stage_path = f'{stage_context}."feast_{project}"' with get_snowflake_conn(self.repo_config.batch_engine) as conn: @@ -136,12 +131,13 @@ def update( # if the stage already exists, # assumes that the materialization functions have been deployed if f"feast_{project}" in stage_list["name"].tolist(): - click.echo( - f"Materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL} already exists" - ) - click.echo() return None + click.echo( + f"Deploying materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL}" + ) + click.echo() + query = f"CREATE STAGE {stage_path}" execute_snowflake_statement(conn, query) diff 
--git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index 6c25b5768f4..a5a51311eb9 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -157,7 +157,7 @@ def pull_latest_from_table_or_query( created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, - user: str = "user", + user: Optional[str] = None, auth: Optional[Authentication] = None, http_scheme: Optional[str] = None, ) -> TrinoRetrievalJob: @@ -176,7 +176,6 @@ def pull_latest_from_table_or_query( timestamps.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" field_string = ", ".join(join_key_columns + feature_name_columns + timestamps) - client = _get_trino_client( config=config, user=user, auth=auth, http_scheme=http_scheme ) @@ -212,7 +211,7 @@ def get_historical_features( registry: Registry, project: str, full_feature_names: bool = False, - user: str = "user", + user: Optional[str] = None, auth: Optional[Authentication] = None, http_scheme: Optional[str] = None, ) -> TrinoRetrievalJob: @@ -303,7 +302,7 @@ def pull_all_from_table_or_query( timestamp_field: str, start_date: datetime, end_date: datetime, - user: str = "user", + user: Optional[str] = None, auth: Optional[Authentication] = None, http_scheme: Optional[str] = None, ) -> RetrievalJob: @@ -375,7 +374,10 @@ def _upload_entity_df_and_get_entity_schema( def _get_trino_client( - config: RepoConfig, user: str, auth: Optional[Any], http_scheme: Optional[str] + config: RepoConfig, + user: Optional[str], + auth: Optional[Any], + http_scheme: Optional[str], ) -> Trino: client = Trino( user=user, diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py 
b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py index 1d4b5881240..97c61f78a60 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py @@ -36,6 +36,8 @@ def __init__( catalog: Optional[str] = None, auth: Optional[Any] = None, http_scheme: Optional[str] = None, + source: Optional[str] = None, + extra_credential: Optional[str] = None, ): self.host = host or os.getenv("TRINO_HOST") self.port = port or os.getenv("TRINO_PORT") @@ -43,6 +45,8 @@ def __init__( self.catalog = catalog or os.getenv("TRINO_CATALOG") self.auth = auth or os.getenv("TRINO_AUTH") self.http_scheme = http_scheme or os.getenv("TRINO_HTTP_SCHEME") + self.source = source or os.getenv("TRINO_SOURCE") + self.extra_credential = extra_credential or os.getenv("TRINO_EXTRA_CREDENTIAL") self._cursor: Optional[Cursor] = None if self.host is None: @@ -56,6 +60,11 @@ def __init__( def _get_cursor(self) -> Cursor: if self._cursor is None: + headers = ( + {trino.constants.HEADER_EXTRA_CREDENTIAL: self.extra_credential} + if self.extra_credential + else {} + ) self._cursor = trino.dbapi.connect( host=self.host, port=self.port, @@ -63,6 +72,8 @@ def _get_cursor(self) -> Cursor: catalog=self.catalog, auth=self.auth, http_scheme=self.http_scheme, + source=self.source, + http_headers=headers, ).cursor() return self._cursor diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py index 6e989bd40ce..f09b79069ca 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py @@ -228,7 +228,6 @@ def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: client = Trino( - user="user", 
catalog=config.offline_store.catalog, host=config.offline_store.host, port=config.offline_store.port, diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 5edfae3472d..e1e9ba99e18 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -634,6 +634,13 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: registry_dict["requestFeatureViews"].append( self._message_to_sorted_dict(request_feature_view.to_proto()) ) + for stream_feature_view in sorted( + self.list_stream_feature_views(project=project), + key=lambda stream_feature_view: stream_feature_view.name, + ): + registry_dict["streamFeatureViews"].append( + self._message_to_sorted_dict(stream_feature_view.to_proto()) + ) for saved_dataset in sorted( self.list_saved_datasets(project=project), key=lambda item: item.name ): diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index fab0f25b410..bdff2f55ceb 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -166,6 +166,9 @@ class RepoConfig(FeastBaseModel): feature values for entities that have already been written into the online store. """ + coerce_tz_aware: Optional[bool] = True + """ If True, coerces entity_df timestamp columns to be timezone aware (to UTC by default). 
""" + def __init__(self, **data: Any): super().__init__(**data) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 9d10b2c3132..62120717d64 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -423,7 +423,7 @@ numpy==1.23.2 # pandavro # pyarrow # scipy -oauthlib==3.2.0 +oauthlib==3.2.1 # via requests-oauthlib oscrypto==1.3.0 # via snowflake-connector-python @@ -487,7 +487,7 @@ proto-plus==1.22.0 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index ac12befb87c..91369309175 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -107,7 +107,7 @@ partd==1.3.0 # via dask proto-plus==1.22.0 # via feast (setup.py) -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 93011cfdcf4..f401d0272a4 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -429,7 +429,7 @@ numpy==1.23.2 # pandavro # pyarrow # scipy -oauthlib==3.2.0 +oauthlib==3.2.1 # via requests-oauthlib oscrypto==1.3.0 # via snowflake-connector-python @@ -495,7 +495,7 @@ proto-plus==1.22.0 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index c2aef636733..b992ad3fc0a 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ 
b/sdk/python/requirements/py3.8-requirements.txt @@ -111,7 +111,7 @@ pkgutil-resolve-name==1.3.10 # via jsonschema proto-plus==1.22.0 # via feast (setup.py) -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index e13eee056bc..a3e26f8e620 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -423,7 +423,7 @@ numpy==1.23.2 # pandavro # pyarrow # scipy -oauthlib==3.2.0 +oauthlib==3.2.1 # via requests-oauthlib oscrypto==1.3.0 # via snowflake-connector-python @@ -487,7 +487,7 @@ proto-plus==1.22.0 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 0d3cb22bbca..395302a9f9f 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -107,7 +107,7 @@ partd==1.3.0 # via dask proto-plus==1.22.0 # via feast (setup.py) -protobuf==3.20.1 +protobuf==3.20.2 # via # feast (setup.py) # google-api-core diff --git a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py index 9c61f6fa198..089efd7a562 100644 --- a/sdk/python/tests/integration/e2e/test_python_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -84,6 +84,29 @@ def test_push(python_fs_client): ) == [initial_temp * 100] +@pytest.mark.integration +@pytest.mark.universal_online_stores +def test_push_source_does_not_exist(python_fs_client): + initial_temp = _get_temperatures_from_feature_server( + python_fs_client, location_ids=[1] + )[0] + response = python_fs_client.post( + "/push", + data=json.dumps( + { + "push_source_name": 
"push_source_does_not_exist", + "df": { + "location_id": [1], + "temperature": [initial_temp * 100], + "event_timestamp": [str(datetime.utcnow())], + "created": [str(datetime.utcnow())], + }, + } + ), + ) + assert response.status_code == 422 + + def _get_temperatures_from_feature_server(client, location_ids: List[int]): get_request_data = { "features": ["pushable_location_stats:temperature"], diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index 8af6c50a13d..ce40295f8b6 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -1,6 +1,6 @@ import pandas as pd -from feast import Field +from feast import Field, PushSource from feast.diff.registry_diff import ( diff_registry_objects, tag_objects_for_keep_delete_update_add, @@ -145,3 +145,27 @@ def post_changed(inputs: pd.DataFrame) -> pd.DataFrame: feast_object_diffs.feast_object_property_diffs[2].property_name == "user_defined_function.body_text" ) + + +def test_diff_registry_objects_batch_to_push_source(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity = Entity(name="id", join_keys=["id"]) + pre_changed = FeatureView( + name="fv2", + entities=[entity], + source=file_source, + ) + post_changed = FeatureView( + name="fv2", + entities=[entity], + source=PushSource(name="push_source", batch_source=file_source), + ) + + feast_object_diffs = diff_registry_objects( + pre_changed, post_changed, "feature view" + ) + assert len(feast_object_diffs.feast_object_property_diffs) == 1 + assert ( + feast_object_diffs.feast_object_property_diffs[0].property_name + == "stream_source" + ) diff --git a/setup.py b/setup.py index 37ed471cfa6..cddfb63b414 100644 --- a/setup.py +++ b/setup.py @@ -96,7 +96,7 @@ BYTEWAX_REQUIRED = ["bytewax==0.10.0", "docker>=5.0.2", "kubernetes<=20.13.0"] SNOWFLAKE_REQUIRED = [ - 
"snowflake-connector-python[pandas]>=2.7.3,<=2.7.8", + "snowflake-connector-python[pandas]>=2.7.3,<3", ] SPARK_REQUIRED = [ diff --git a/ui/package.json b/ui/package.json index 73165523252..bcb5af1fa8b 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.25.0", + "version": "0.25.1", "private": false, "files": [ "dist" diff --git a/ui/public/registry.json b/ui/public/registry.json index 2d5c93c9620..279c9d08327 100644 --- a/ui/public/registry.json +++ b/ui/public/registry.json @@ -29,6 +29,24 @@ "name": "zipcode", "timestampField": "event_timestamp", "type": "BATCH_FILE" + }, + { + "batchSource": { + "fileOptions": { + "uri": "data/zipcode_table.parquet" + }, + "name": "user_stats", + "timestampField": "timestamp", + "type": "BATCH_FILE" + }, + "dataSourceClassType": "feast.data_source.KafkaSource", + "description": "The Kafka stream example", + "kafkaOptions": {"messageFormat": {"jsonFormat": {"schemaJson": "id string, timestamp timestamp"}}, + "watermarkDelayThreshold": "300s"}, + "name": "driver_stats_stream", + "owner": "test@gmail.com", + "timestampField": "timestamp", + "type": "STREAM_KAFKA" } ], "entities": [ @@ -630,5 +648,59 @@ } } ], + "streamFeatureViews": [ + { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.171556Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.171556Z" + }, + "spec": { + "batchSource": { + "createdTimestampColumn": "created_timestamp", + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "fileOptions": { + "uri": "data/zipcode_table.parquet" + }, + "name": "zipcode", + "timestampField": "event_timestamp", + "type": "BATCH_FILE" + }, + "features": [ + { + "name": "conv_percentage", + "valueType": "FLOAT" + }, + { + "name": "acc_percentage", + "valueType": "FLOAT" + } + ], + "name": "transaction_stream_example", + "streamSource": { + "batchSource": { + "fileOptions": { + "uri": "data/zipcode_table.parquet" + }, + "name": "user_stats", + 
"timestampField": "timestamp", + "type": "BATCH_FILE" + }, + "dataSourceClassType": "feast.data_source.KafkaSource", + "description": "The Kafka stream example", + "kafkaOptions": {"messageFormat": {"jsonFormat": {"schemaJson": "id string, timestamp timestamp"}}, + "watermarkDelayThreshold": "300s"}, + "name": "driver_stats_stream", + "owner": "test@gmail.com", + "timestampField": "timestamp", + "type": "STREAM_KAFKA" + }, + "ttl": "86400s", + "userDefinedFunction": { + "body": "@stream_feature_view(\n sources=[driver_stats_stream_source],\n mode=\"spark\",\n schema=[\n Field(name=\"conv_percentage\", dtype=Float32),\n Field(name=\"acc_percentage\", dtype=Float32),\n ],\n timestamp_field=\"event_timestamp\",\n online=True,\n source=driver_stats_stream_source,\n tags={},\n)\ndef driver_hourly_stats_stream(df: DataFrame) -> DataFrame:\n from pyspark.sql.functions import col\n return (\n df.withColumn(\"conv_percentage\", col(\"conv_rate\") * 100.0)\n .withColumn(\"acc_percentage\", col(\"acc_rate\") * 100.0)\n .drop(\"conv_rate\", \"acc_rate\")\n )\n", + "name": "driver_hourly_stats_stream" + } + } + } + ], "project": "credit_scoring_aws" } diff --git a/ui/src/custom-tabs/TabsRegistryContext.tsx b/ui/src/custom-tabs/TabsRegistryContext.tsx index 9f493e6d11b..83820de1535 100644 --- a/ui/src/custom-tabs/TabsRegistryContext.tsx +++ b/ui/src/custom-tabs/TabsRegistryContext.tsx @@ -10,6 +10,7 @@ import { import RegularFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper"; import OnDemandFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper"; +import StreamFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/StreamFeatureViewCustomTabLoadingWrapper"; import FeatureServiceCustomTabLoadingWrapper from "../utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper"; import FeatureCustomTabLoadingWrapper from "../utils/custom-tabs/FeatureCustomTabLoadingWrapper"; import 
DataSourceCustomTabLoadingWrapper from "../utils/custom-tabs/DataSourceCustomTabLoadingWrapper"; @@ -19,6 +20,7 @@ import DatasetCustomTabLoadingWrapper from "../utils/custom-tabs/DatasetCustomTa import { RegularFeatureViewCustomTabRegistrationInterface, OnDemandFeatureViewCustomTabRegistrationInterface, + StreamFeatureViewCustomTabRegistrationInterface, FeatureServiceCustomTabRegistrationInterface, FeatureCustomTabRegistrationInterface, DataSourceCustomTabRegistrationInterface, @@ -30,6 +32,7 @@ import { interface FeastTabsRegistryInterface { RegularFeatureViewCustomTabs?: RegularFeatureViewCustomTabRegistrationInterface[]; OnDemandFeatureViewCustomTabs?: OnDemandFeatureViewCustomTabRegistrationInterface[]; + StreamFeatureViewCustomTabs?: StreamFeatureViewCustomTabRegistrationInterface[]; FeatureServiceCustomTabs?: FeatureServiceCustomTabRegistrationInterface[]; FeatureCustomTabs?: FeatureCustomTabRegistrationInterface[]; DataSourceCustomTabs?: DataSourceCustomTabRegistrationInterface[]; @@ -148,6 +151,16 @@ const useOnDemandFeatureViewCustomTabs = (navigate: NavigateFunction) => { ); }; +const useStreamFeatureViewCustomTabs = (navigate: NavigateFunction) => { + const { StreamFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + StreamFeatureViewCustomTabs || [], + navigate + ); +}; + const useFeatureServiceCustomTabs = (navigate: NavigateFunction) => { const { FeatureServiceCustomTabs } = React.useContext(TabsRegistryContext); @@ -214,6 +227,16 @@ const useOnDemandFeatureViewCustomTabRoutes = () => { ); }; +const useStreamFeatureViewCustomTabRoutes = () => { + const { StreamFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + StreamFeatureViewCustomTabs || [], + StreamFeatureViewCustomTabLoadingWrapper + ); +}; + const useFeatureServiceCustomTabRoutes = () => { const { FeatureServiceCustomTabs } = React.useContext(TabsRegistryContext); @@ -264,6 +287,7 @@ 
export { // Navigation useRegularFeatureViewCustomTabs, useOnDemandFeatureViewCustomTabs, + useStreamFeatureViewCustomTabs, useFeatureServiceCustomTabs, useFeatureCustomTabs, useDataSourceCustomTabs, @@ -272,6 +296,7 @@ export { // Routes useRegularFeatureViewCustomTabRoutes, useOnDemandFeatureViewCustomTabRoutes, + useStreamFeatureViewCustomTabRoutes, useFeatureServiceCustomTabRoutes, useFeatureCustomTabRoutes, useDataSourceCustomTabRoutes, diff --git a/ui/src/custom-tabs/stream-fv-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/stream-fv-demo-tab/DemoCustomTab.tsx new file mode 100644 index 00000000000..86e59d10c71 --- /dev/null +++ b/ui/src/custom-tabs/stream-fv-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,85 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + StreamFeatureViewCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ + id, + feastObjectQuery, +}: StreamFeatureViewCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+ There was an error loading the Dashboard application. Contact your + administrator for help. +

+ } + /> + ); + } + + // Feast UI uses the Elastic UI component system. + // and are particularly + // useful for layouts. + return ( + + + +

Hello World. The following is fetched data.

+ + {isSuccess && data && ( + +
{JSON.stringify(data, null, 2)}
+
+ )} +
+ +

... and this is data from Feast UI’s own query.

+ + {feastObjectQuery.isSuccess && feastObjectQuery.data && ( + +
{JSON.stringify(feastObjectQuery.data, null, 2)}
+
+ )} +
+
+
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/stream-fv-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/stream-fv-demo-tab/useDemoQuery.tsx new file mode 100644 index 00000000000..b93602dbe3b --- /dev/null +++ b/ui/src/custom-tabs/stream-fv-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. 
+ return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/types.ts b/ui/src/custom-tabs/types.ts index 1e555d6185c..ea1dbc8757b 100644 --- a/ui/src/custom-tabs/types.ts +++ b/ui/src/custom-tabs/types.ts @@ -1,5 +1,6 @@ import { useLoadOnDemandFeatureView, + useLoadStreamFeatureView, useLoadRegularFeatureView, } from "../pages/feature-views/useLoadFeatureView"; import useLoadFeature from "../pages/features/useLoadFeature"; @@ -48,6 +49,23 @@ interface OnDemandFeatureViewCustomTabRegistrationInterface }: OnDemandFeatureViewCustomTabProps) => JSX.Element; } +// Type for Stream Feature View Custom Tabs +type StreamFeatureViewQueryReturnType = ReturnType< + typeof useLoadStreamFeatureView +>; +interface StreamFeatureViewCustomTabProps { + id: string | undefined; + feastObjectQuery: StreamFeatureViewQueryReturnType; +} +interface StreamFeatureViewCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: StreamFeatureViewCustomTabProps) => JSX.Element; +} + // Type for Entity Custom Tabs interface EntityCustomTabProps { id: string | undefined; @@ -127,6 +145,9 @@ export type { OnDemandFeatureViewQueryReturnType, OnDemandFeatureViewCustomTabProps, OnDemandFeatureViewCustomTabRegistrationInterface, + StreamFeatureViewQueryReturnType, + StreamFeatureViewCustomTabProps, + StreamFeatureViewCustomTabRegistrationInterface, FeatureServiceCustomTabRegistrationInterface, FeatureServiceCustomTabProps, DataSourceCustomTabRegistrationInterface, diff --git a/ui/src/index.tsx b/ui/src/index.tsx index 2233b90c9e6..e38570929d4 
100644 --- a/ui/src/index.tsx +++ b/ui/src/index.tsx @@ -18,6 +18,7 @@ import FeastUI from "./FeastUI"; import DataTab from "./custom-tabs/data-tab/DataTab"; import RFVDemoCustomTab from "./custom-tabs/reguar-fv-demo-tab/DemoCustomTab"; import ODFVDemoCustomTab from "./custom-tabs/ondemand-fv-demo-tab/DemoCustomTab"; +import SFVDemoCustomTab from "./custom-tabs/stream-fv-demo-tab/DemoCustomTab"; import FSDemoCustomTab from "./custom-tabs/feature-service-demo-tab/DemoCustomTab"; import DSDemoCustomTab from "./custom-tabs/data-source-demo-tab/DemoCustomTab"; import EntDemoCustomTab from "./custom-tabs/entity-demo-tab/DemoCustomTab"; @@ -46,6 +47,13 @@ const tabsRegistry = { Component: ODFVDemoCustomTab, }, ], + StreamFeatureViewCustomTabs: [ + { + label: "Custom Tab Demo", + path: "demo-tab", + Component: SFVDemoCustomTab, + }, + ], FeatureServiceCustomTabs: [ { label: "Custom Tab Demo", @@ -93,4 +101,4 @@ ReactDOM.render( /> , document.getElementById("root") -); \ No newline at end of file +); diff --git a/ui/src/pages/feature-views/FeatureViewInstance.tsx b/ui/src/pages/feature-views/FeatureViewInstance.tsx index b0fa7c32b03..5352507573f 100644 --- a/ui/src/pages/feature-views/FeatureViewInstance.tsx +++ b/ui/src/pages/feature-views/FeatureViewInstance.tsx @@ -7,8 +7,11 @@ import { FeastFeatureViewType } from "../../parsers/feastFeatureViews"; import RegularFeatureInstance from "./RegularFeatureViewInstance"; import { FEAST_FV_TYPES } from "../../parsers/mergedFVTypes"; import { FeastODFVType } from "../../parsers/feastODFVS"; +import { FeastSFVType } from "../../parsers/feastSFVS"; import useLoadFeatureView from "./useLoadFeatureView"; import OnDemandFeatureInstance from "./OnDemandFeatureViewInstance"; +import StreamFeatureInstance from "./StreamFeatureViewInstance"; + const FeatureViewInstance = () => { const { featureViewName } = useParams(); @@ -45,6 +48,11 @@ const FeatureViewInstance = () => { return ; } + if (data.type === FEAST_FV_TYPES.stream) { + const 
sfv: FeastSFVType = data.object; + + return ; + } } return

No Data So Sad

; diff --git a/ui/src/pages/feature-views/FeatureViewListingTable.tsx b/ui/src/pages/feature-views/FeatureViewListingTable.tsx index 59f8b1ed7aa..ceb756db804 100644 --- a/ui/src/pages/feature-views/FeatureViewListingTable.tsx +++ b/ui/src/pages/feature-views/FeatureViewListingTable.tsx @@ -35,7 +35,7 @@ const FeatureViewListingTable = ({ href={`/p/${projectName}/feature-view/${name}`} to={`/p/${projectName}/feature-view/${name}`} > - {name} {item.type === "ondemand" && ondemand} + {name} {(item.type === "ondemand" && ondemand) || (item.type === "stream" && stream)} ); }, diff --git a/ui/src/pages/feature-views/StreamFeatureViewInstance.tsx b/ui/src/pages/feature-views/StreamFeatureViewInstance.tsx new file mode 100644 index 00000000000..ba4c0087278 --- /dev/null +++ b/ui/src/pages/feature-views/StreamFeatureViewInstance.tsx @@ -0,0 +1,69 @@ +import React from "react"; +import { Route, Routes, useNavigate } from "react-router-dom"; +import { useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { FeatureViewIcon32 } from "../../graphics/FeatureViewIcon"; +import { useMatchExact } from "../../hooks/useMatchSubpath"; +import { FeastSFVType } from "../../parsers/feastSFVS"; +import StreamFeatureViewOverviewTab from "./StreamFeatureViewOverviewTab"; + +import { + useStreamFeatureViewCustomTabs, + useStreamFeatureViewCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +interface StreamFeatureInstanceProps { + data: FeastSFVType; +} + +const StreamFeatureInstance = ({ data }: StreamFeatureInstanceProps) => { + const navigate = useNavigate(); + let { featureViewName } = useParams(); + + const { customNavigationTabs } = useStreamFeatureViewCustomTabs(navigate); + const CustomTabRoutes = useStreamFeatureViewCustomTabRoutes(); + + return ( + + { + navigate(""); + }, + }, + ...customNavigationTabs, + ]} + /> + + + + } + /> + {CustomTabRoutes} + + + + + ); +}; + +export default 
StreamFeatureInstance; diff --git a/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx new file mode 100644 index 00000000000..56efc428453 --- /dev/null +++ b/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx @@ -0,0 +1,135 @@ +import { + EuiFlexGroup, + EuiFlexItem, + EuiHorizontalRule, + EuiText, + EuiTitle, + EuiPanel, + EuiCodeBlock, + EuiSpacer, +} from "@elastic/eui"; +import React from "react"; +import FeaturesListDisplay from "../../components/FeaturesListDisplay"; +import { + FeastSFVType, +} from "../../parsers/feastSFVS"; +import { useParams } from "react-router-dom"; +import { EntityRelation } from "../../parsers/parseEntityRelationships"; +import { FEAST_FCO_TYPES } from "../../parsers/types"; +import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; +import ConsumingFeatureServicesList from "./ConsumingFeatureServicesList"; +import EuiCustomLink from "../../components/EuiCustomLink"; + +interface StreamFeatureViewOverviewTabProps { + data: FeastSFVType; +} + +const whereFSconsumesThisFv = (fvName: string) => { + return (r: EntityRelation) => { + return ( + r.source.name === fvName && + r.target.type === FEAST_FCO_TYPES.featureService + ); + }; +}; + +const StreamFeatureViewOverviewTab = ({ + data, +}: StreamFeatureViewOverviewTabProps) => { + const inputs = Object.entries([data.spec.streamSource]); + const { projectName } = useParams(); + + const relationshipQuery = useLoadRelationshipData(); + const fsNames = relationshipQuery.data + ? relationshipQuery.data + .filter(whereFSconsumesThisFv(data.spec.name)) + .map((fs) => { + return fs.target.name; + }) + : []; + + return ( + + + + + +

Transformation

+
+ + + {data.spec.userDefinedFunction.body} + +
+
+
+ + + + +

Features ({data.spec.features.length})

+
+ + {projectName && data.spec.features ? ( + + ) : ( + No Tags sepcified on this feature view. + )} +
+
+ + + +

Inputs ({inputs.length})

+
+ + + {inputs.map(([key, inputGroup]) => { + + return ( + + + Stream Source + + + + {inputGroup.name} + + + + + {JSON.stringify(inputGroup, null, 2)} + + + + ); + })} + +
+ + + +

Consuming Feature Services

+
+ + {fsNames.length > 0 ? ( + + ) : ( + No services consume this feature view + )} +
+
+
+
+ ); +}; + +export default StreamFeatureViewOverviewTab; diff --git a/ui/src/pages/feature-views/useLoadFeatureView.ts b/ui/src/pages/feature-views/useLoadFeatureView.ts index ded7900ea94..7685171b72b 100644 --- a/ui/src/pages/feature-views/useLoadFeatureView.ts +++ b/ui/src/pages/feature-views/useLoadFeatureView.ts @@ -51,5 +51,22 @@ const useLoadOnDemandFeatureView = (featureViewName: string) => { }; }; +const useLoadStreamFeatureView = (featureViewName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.streamFeatureViews?.find((fv) => { + return fv.spec.name === featureViewName; + }); + + return { + ...registryQuery, + data, + }; +}; + export default useLoadFeatureView; -export { useLoadRegularFeatureView, useLoadOnDemandFeatureView }; +export { useLoadRegularFeatureView, useLoadOnDemandFeatureView, useLoadStreamFeatureView }; diff --git a/ui/src/parsers/feastRegistry.ts b/ui/src/parsers/feastRegistry.ts index 98e4fccca2a..f84187046a8 100644 --- a/ui/src/parsers/feastRegistry.ts +++ b/ui/src/parsers/feastRegistry.ts @@ -5,6 +5,7 @@ import { FeastFeatureServiceSchema } from "./feastFeatureServices"; import { FeastFeatureViewSchema } from "./feastFeatureViews"; import { FeastSavedDatasetSchema } from "./feastSavedDataset"; import { FeastODFVSchema } from "./feastODFVS"; +import { FeastSFVSchema } from "./feastSFVS"; const FeastRegistrySchema = z.object({ project: z.string(), @@ -12,6 +13,7 @@ const FeastRegistrySchema = z.object({ entities: z.array(FeastEntitySchema).optional(), featureViews: z.array(FeastFeatureViewSchema).optional(), onDemandFeatureViews: z.array(FeastODFVSchema).optional(), + streamFeatureViews: z.array(FeastSFVSchema).optional(), featureServices: z.array(FeastFeatureServiceSchema).optional(), savedDatasets: z.array(FeastSavedDatasetSchema).optional(), }); diff --git 
a/ui/src/parsers/feastSFVS.ts b/ui/src/parsers/feastSFVS.ts new file mode 100644 index 00000000000..f21b3d1cdac --- /dev/null +++ b/ui/src/parsers/feastSFVS.ts @@ -0,0 +1,41 @@ +import { z } from "zod"; +import { FeastFeatureColumnSchema } from "./feastFeatureViews"; +import {FeastDatasourceSchema} from "./feastDatasources"; + +const FeatureViewProjectionSchema = z.object({ + featureViewProjection: z.object({ + featureViewName: z.string(), + featureColumns: z.array(FeastFeatureColumnSchema), + }), +}); + +const StreamSourceSchema = z.object({ + type: z.string(), + name: z.string(), + owner: z.string().optional(), + description: z.string().optional(), +}); + +const FeastSFVSchema = z.object({ + spec: z.object({ + name: z.string(), + features: z.array(FeastFeatureColumnSchema), + batchSource: FeastDatasourceSchema, + streamSource: StreamSourceSchema, + userDefinedFunction: z.object({ + name: z.string(), + body: z.string(), + }), + }), + meta: z.object({ + createdTimestamp: z.string().transform((val) => new Date(val)), + lastUpdatedTimestamp: z.string().transform((val) => new Date(val)), + }), +}); + +type FeastSFVType = z.infer; +type StreamSourceType = z.infer; +type FeatureViewProjectionType = z.infer; + +export { FeastSFVSchema }; +export type { FeastSFVType, StreamSourceType, FeatureViewProjectionType}; diff --git a/ui/src/parsers/mergedFVTypes.ts b/ui/src/parsers/mergedFVTypes.ts index 6a53b18e94d..edf1adee9e5 100644 --- a/ui/src/parsers/mergedFVTypes.ts +++ b/ui/src/parsers/mergedFVTypes.ts @@ -3,11 +3,13 @@ import { FeastFeatureViewType, } from "./feastFeatureViews"; import { FeastODFVType } from "./feastODFVS"; +import { FeastSFVType } from "./feastSFVS"; import { FeastRegistryType } from "./feastRegistry"; enum FEAST_FV_TYPES { regular = "regular", ondemand = "ondemand", + stream = "stream" } interface regularFVInterface { @@ -24,7 +26,14 @@ interface ODFVInterface { object: FeastODFVType; } -type genericFVType = regularFVInterface | ODFVInterface; 
+interface SFVInterface { + name: string; + type: FEAST_FV_TYPES.stream; + features: FeastFeatureColumnType[]; + object: FeastSFVType; +} + +type genericFVType = regularFVInterface | ODFVInterface | SFVInterface; const mergedFVTypes = (objects: FeastRegistryType) => { const mergedFVMap: Record = {}; @@ -55,9 +64,21 @@ const mergedFVTypes = (objects: FeastRegistryType) => { mergedFVList.push(obj); }); + objects.streamFeatureViews?.forEach((sfv) => { + const obj: genericFVType = { + name: sfv.spec.name, + type: FEAST_FV_TYPES.stream, + features: sfv.spec.features, + object: sfv, + }; + + mergedFVMap[sfv.spec.name] = obj; + mergedFVList.push(obj); + }); + return { mergedFVMap, mergedFVList }; }; export default mergedFVTypes; export { FEAST_FV_TYPES }; -export type { genericFVType, regularFVInterface, ODFVInterface }; +export type { genericFVType, regularFVInterface, ODFVInterface, SFVInterface }; diff --git a/ui/src/parsers/parseEntityRelationships.ts b/ui/src/parsers/parseEntityRelationships.ts index f54bff63a1c..8424bb7a44f 100644 --- a/ui/src/parsers/parseEntityRelationships.ts +++ b/ui/src/parsers/parseEntityRelationships.ts @@ -88,6 +88,32 @@ const parseEntityRelationships = (objects: FeastRegistryType) => { }); }); + objects.streamFeatureViews?.forEach((fv) => { + // stream source + links.push({ + source: { + type: FEAST_FCO_TYPES["dataSource"], + name: fv.spec.streamSource.name, + }, + target: { + type: FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + }, + }); + + // batch source + links.push({ + source: { + type: FEAST_FCO_TYPES["dataSource"], + name: fv.spec.batchSource.name, + }, + target: { + type: FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + }, + }); + }); + return links; }; diff --git a/ui/src/utils/custom-tabs/StreamFeatureViewCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/StreamFeatureViewCustomTabLoadingWrapper.tsx new file mode 100644 index 00000000000..098ab848a55 --- /dev/null +++ 
b/ui/src/utils/custom-tabs/StreamFeatureViewCustomTabLoadingWrapper.tsx @@ -0,0 +1,46 @@ +import React from "react"; + +import { useParams } from "react-router-dom"; +import useLoadFeatureView from "../../pages/feature-views/useLoadFeatureView"; +import { + StreamFeatureViewCustomTabProps, + StreamFeatureViewQueryReturnType, +} from "../../custom-tabs/types"; +import { FEAST_FV_TYPES } from "../../parsers/mergedFVTypes"; + +interface StreamFeatureViewCustomTabLoadingWrapperProps { + Component: (props: StreamFeatureViewCustomTabProps) => JSX.Element; +} + +const StreamFeatureViewCustomTabLoadingWrapper = ({ + Component, +}: StreamFeatureViewCustomTabLoadingWrapperProps) => { + const { featureViewName } = useParams(); + + if (!featureViewName) { + throw new Error( + `This route has no 'featureViewName' part. This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadFeatureView(featureViewName); + + if ( + feastObjectQuery.isSuccess && + feastObjectQuery.data && + feastObjectQuery.data.type !== FEAST_FV_TYPES.stream + ) { + throw new Error( + `This should not happen. Somehow a custom tab on a SFV page received data that does not have the shape?` + ); + } + + return ( + + ); +}; + +export default StreamFeatureViewCustomTabLoadingWrapper;