From 2de4d8116d9f3848bc0b23792bd7df6cd33f1648 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Wed, 10 Aug 2022 13:23:28 -0400 Subject: [PATCH 01/15] ci: Make release depend again on web ui publish Signed-off-by: Danny Chiao --- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9fcbc1e052..c56bd36534 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,6 +74,7 @@ jobs: release: name: release + needs: publish-web-ui-npm runs-on: ubuntu-latest env: GITHUB_TOKEN: ${{ github.event.inputs.token }} From 3fc38a473800efbfe2d65348be3326c816fa1f00 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Wed, 10 Aug 2022 09:38:52 -0700 Subject: [PATCH 02/15] chore: Update provider docstrings (#3056) Update provider docstrings Signed-off-by: Felix Wang Signed-off-by: Felix Wang --- .../feast/infra/passthrough_provider.py | 2 +- sdk/python/feast/infra/provider.py | 188 ++++++++++++------ 2 files changed, 124 insertions(+), 66 deletions(-) diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 0b09f5df43..d8a1641783 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -37,7 +37,7 @@ class PassthroughProvider(Provider): """ - The Passthrough provider delegates all operations to the underlying online and offline stores. + The passthrough provider delegates all operations to the underlying online and offline stores. 
""" def __init__(self, config: RepoConfig): diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 086c9ec6b3..e99a09a9e2 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -1,4 +1,4 @@ -import abc +from abc import ABC, abstractmethod from datetime import datetime from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -27,12 +27,18 @@ } -class Provider(abc.ABC): - @abc.abstractmethod +class Provider(ABC): + """ + A provider defines an implementation of a feature store object. It orchestrates the various + components of a feature store, such as the offline store, online store, and materialization + engine. It is configured through a RepoConfig object. + """ + + @abstractmethod def __init__(self, config: RepoConfig): - ... + pass - @abc.abstractmethod + @abstractmethod def update_infra( self, project: str, @@ -43,22 +49,20 @@ def update_infra( partial: bool, ): """ - Reconcile cloud resources with the objects declared in the feature repo. + Reconciles cloud resources with the specified set of Feast objects. Args: - project: Project to which tables belong - tables_to_delete: Tables that were deleted from the feature repo, so provider needs to - clean up the corresponding cloud resources. - tables_to_keep: Tables that are still in the feature repo. Depending on implementation, - provider may or may not need to update the corresponding resources. - entities_to_delete: Entities that were deleted from the feature repo, so provider needs to - clean up the corresponding cloud resources. - entities_to_keep: Entities that are still in the feature repo. Depending on implementation, - provider may or may not need to update the corresponding resources. - partial: if true, then tables_to_delete and tables_to_keep are *not* exhaustive lists. - There may be other tables that are not touched by this update. 
+ project: Feast project to which the objects belong. + tables_to_delete: Feature views whose corresponding infrastructure should be deleted. + tables_to_keep: Feature views whose corresponding infrastructure should not be deleted, and + may need to be updated. + entities_to_delete: Entities whose corresponding infrastructure should be deleted. + entities_to_keep: Entities whose corresponding infrastructure should not be deleted, and + may need to be updated. + partial: If true, tables_to_delete and tables_to_keep are not exhaustive lists, so + infrastructure corresponding to other feature views should be not be touched. """ - ... + pass def plan_infra( self, config: RepoConfig, desired_registry_proto: RegistryProto @@ -72,7 +76,7 @@ def plan_infra( """ return Infra() - @abc.abstractmethod + @abstractmethod def teardown_infra( self, project: str, @@ -80,16 +84,16 @@ def teardown_infra( entities: Sequence[Entity], ): """ - Tear down all cloud resources for a repo. + Tears down all cloud resources for the specified set of Feast objects. Args: - project: Feast project to which tables belong - tables: Tables that are declared in the feature repo. - entities: Entities that are declared in the feature repo. + project: Feast project to which the objects belong. + tables: Feature views whose corresponding infrastructure should be deleted. + entities: Entities whose corresponding infrastructure should be deleted. """ - ... + pass - @abc.abstractmethod + @abstractmethod def online_write_batch( self, config: RepoConfig, @@ -100,21 +104,20 @@ def online_write_batch( progress: Optional[Callable[[int], Any]], ) -> None: """ - Write a batch of feature rows to the online store. This is a low level interface, not - expected to be used by the users directly. + Writes a batch of feature rows to the online store. If a tz-naive timestamp is passed to this method, it is assumed to be UTC. Args: - config: The RepoConfig for the current FeatureStore. 
- table: Feast FeatureView - data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, - a dict containing feature values, an event timestamp for the row, and - the created timestamp for the row if it exists. - progress: Optional function to be called once every mini-batch of rows is written to - the online store. Can be used to display progress. + config: The config for the current feature store. + table: Feature view to which these feature rows correspond. + data: A list of quadruplets containing feature data. Each quadruplet contains an entity + key, a dict containing feature values, an event timestamp for the row, and the created + timestamp for the row if it exists. + progress: Function to be called once a batch of rows is written to the online store, used + to show progress. """ - ... + pass def ingest_df( self, @@ -123,7 +126,12 @@ def ingest_df( df: pd.DataFrame, ): """ - Ingests a DataFrame directly into the online store + Persists a dataframe to the online store. + + Args: + feature_view: The feature view to which the dataframe corresponds. + entities: The entities that are referenced by the dataframe. + df: The dataframe to be persisted. """ pass @@ -133,11 +141,15 @@ def ingest_df_to_offline_store( df: pyarrow.Table, ): """ - Ingests a DataFrame directly into the offline store + Persists a dataframe to the offline store. + + Args: + feature_view: The feature view to which the dataframe corresponds. + df: The dataframe to be persisted. """ pass - @abc.abstractmethod + @abstractmethod def materialize_single_feature_view( self, config: RepoConfig, @@ -148,9 +160,21 @@ def materialize_single_feature_view( project: str, tqdm_builder: Callable[[int], tqdm], ) -> None: + """ + Writes latest feature values in the specified time range to the online store. + + Args: + config: The config for the current feature store. + feature_view: The feature view to materialize. + start_date: The start of the time range. 
+ end_date: The end of the time range. + registry: The registry for the current feature store. + project: Feast project to which the objects belong. + tqdm_builder: A function to monitor the progress of materialization. + """ pass - @abc.abstractmethod + @abstractmethod def get_historical_features( self, config: RepoConfig, @@ -161,9 +185,28 @@ def get_historical_features( project: str, full_feature_names: bool, ) -> RetrievalJob: + """ + Retrieves the point-in-time correct historical feature values for the specified entity rows. + + Args: + config: The config for the current feature store. + feature_views: A list containing all feature views that are referenced in the entity rows. + feature_refs: The features to be retrieved. + entity_df: A collection of rows containing all entity columns on which features need to be joined, + as well as the timestamp column used for point-in-time joins. Either a pandas dataframe can be + provided or a SQL query. + registry: The registry for the current feature store. + project: Feast project to which the feature views belong. + full_feature_names: If True, feature names will be prefixed with the corresponding feature view name, + changing them from the format "feature" to "feature_view__feature" (e.g. "daily_transactions" + changes to "customer_fv__daily_transactions"). + + Returns: + A RetrievalJob that can be executed to get the features. + """ pass - @abc.abstractmethod + @abstractmethod def online_read( self, config: RepoConfig, @@ -172,32 +215,38 @@ def online_read( requested_features: List[str] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: """ - Read feature values given an Entity Key. This is a low level interface, not - expected to be used by the users directly. + Reads features values for the given entity keys. + + Args: + config: The config for the current feature store. + table: The feature view whose feature values should be read. 
+ entity_keys: The list of entity keys for which feature values should be read. + requested_features: The list of features that should be read. Returns: - Data is returned as a list, one item per entity key. Each item in the list is a tuple - of event_ts for the row, and the feature data as a dict from feature names to values. - Values are returned as Value proto message. + A list of the same length as entity_keys. Each item in the list is a tuple where the first + item is the event timestamp for the row, and the second item is a dict mapping feature names + to values, which are returned in proto format. """ - ... + pass - @abc.abstractmethod + @abstractmethod def retrieve_saved_dataset( self, config: RepoConfig, dataset: SavedDataset ) -> RetrievalJob: """ - Read saved dataset from offline store. - All parameters for retrieval (like path, datetime boundaries, column names for both keys and features, etc) - are determined from SavedDataset object. + Reads a saved dataset. - Returns: - RetrievalJob object, which is lazy wrapper for actual query performed under the hood. + Args: + config: The config for the current feature store. + dataset: A SavedDataset object containing all parameters necessary for retrieving the dataset. + Returns: + A RetrievalJob that can be executed to get the saved dataset. """ - ... + pass - @abc.abstractmethod + @abstractmethod def write_feature_service_logs( self, feature_service: FeatureService, @@ -206,16 +255,20 @@ def write_feature_service_logs( registry: BaseRegistry, ): """ - Write features and entities logged by a feature server to an offline store. + Writes features and entities logged by a feature server to the offline store. - Schema of logs table is being inferred from the provided feature service. - Only feature services with configured logging are accepted. + The schema of the logs table is inferred from the specified feature service. Only feature + services with configured logging are accepted. 
- Logs dataset can be passed as Arrow Table or path to parquet directory. + Args: + feature_service: The feature service to be logged. + logs: The logs, either as an arrow table or as a path to a parquet directory. + config: The config for the current feature store. + registry: The registry for the current feature store. """ - ... + pass - @abc.abstractmethod + @abstractmethod def retrieve_feature_service_logs( self, feature_service: FeatureService, @@ -225,14 +278,19 @@ def retrieve_feature_service_logs( registry: BaseRegistry, ) -> RetrievalJob: """ - Read logged features from an offline store for a given time window [from, to). - Target table is determined based on logging configuration from the feature service. + Reads logged features for the specified time window. - Returns: - RetrievalJob object, which wraps the query to the offline store. + Args: + feature_service: The feature service whose logs should be retrieved. + start_date: The start of the window. + end_date: The end of the window. + config: The config for the current feature store. + registry: The registry for the current feature store. + Returns: + A RetrievalJob that can be executed to get the feature service logs. """ - ... 
+ pass def get_feature_server_endpoint(self) -> Optional[str]: """Returns endpoint for the feature server, if it exists.""" From 53b2c81f0d7a5466db37426f49e7e0f3cdce32de Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Wed, 10 Aug 2022 16:46:55 -0400 Subject: [PATCH 03/15] docs: Improve release process notes (#3060) * docs: Improve release process notes Signed-off-by: Danny Chiao * more links Signed-off-by: Danny Chiao * more links Signed-off-by: Danny Chiao Signed-off-by: Danny Chiao --- docs/project/release-process.md | 79 +++++++++++++++++++++++++-------- 1 file changed, 61 insertions(+), 18 deletions(-) diff --git a/docs/project/release-process.md b/docs/project/release-process.md index 7fb9c2a560..2ddc697730 100644 --- a/docs/project/release-process.md +++ b/docs/project/release-process.md @@ -4,23 +4,49 @@ For Feast maintainers, these are the concrete steps for making a new release. -### Pre-release Verification (Verification that wheels are built correctly) for minor release. +### 1. (for patch releases) Cherry-pick changes into the branch from master +If you were cutting Feast 0.22.3, for example, you might do: +1. `git checkout v0.22-branch` (or `git pull upstream v0.22-branch --rebase` if you've cut a release before) +2. `git cherry-pick [COMMIT FROM MASTER]` +3. `git push upstream v0.22-branch` to commit changes to the release branch + +> Note: if you're handling a maintenance release (i.e. an older version), semantic release may complain at you. See +> [Sample PR](https://github.com/feast-dev/feast/commit/40f2a6e13dd7d2a5ca5bff1af378e8712621d4f2) to enable an older +> branch to cut releases. + +After this step, you will have all the changes you need in the branch. + +### 2. Pre-release verification +A lot of things can go wrong. One of the most common is getting the wheels to build correctly (and not accidentally +building dev wheels from improper tagging or local code changes during the release process). 
+ +We verify the wheels building in **your fork** of Feast, not the main feast-dev/feast repo. + +#### For minor releases (e.g. v0.22.0) 1. Merge upstream master changes into your **fork**. Make sure you are running the workflow off of your fork! 2. Create a tag manually for the release on your fork. For example, if you are doing a release for version 0.22.0, create a tag by doing the following. - Checkout master branch and run `git tag v0.22.0`. - Run `git push --tags` to push the tag to your forks master branch. -3. Access the `Actions` tab on your github UI on your fork and click the `build_wheels` action. This workflow will build the python sdk wheels for Python 3.8-3.10 on MacOS 10.15 and Linux and verify that these wheels are correct. The publish workflow uses this action to publish the python wheels for a new release to pypi. + > This is important. If you don't have a tag, then the wheels you build will be **dev wheels**, which we can't + > push. The release process will automatically produce a tag for you via Semantic Release. +3. Access the `Actions` tab on your GitHub UI on your fork and click the `build_wheels` action. This workflow will + build the python sdk wheels for Python 3.8-3.10 on MacOS 10.15 and Linux and verify that these wheels are correct. + The publish workflow uses this action to publish the python wheels for a new release to PyPI. 4. Look for the header `This workflow has a workflow_dispatch event trigger` and click `Run Workflow` on the right. -5. Run the workflow off of the tag you just created(`v0.22.0` in this case) and verify that the workflow worked (i.e ensure that all jobs are green). +5. Run the workflow off of the tag you just created(`v0.22.0` in this case, **not** the master branch) and verify that + the workflow worked (i.e ensure that all jobs are green). -### Pre-release Verification (Verification that wheels are built correctly) for patch release. -1. 
Check out the branch of your release (e.g `v0.22-branch` on your local **fork**) and push this to your fork (`git push -u origin `). -2. Cherry pick commits that are relevant to the patch release onto your forked branch. -3. Checkout the release branch and add a patch release tag (e.g `v0.22.1`) by running `git tag `. -4. Push tags to your origin branch with `git push origin `. -5. Kick off `build_wheels` workflow in the same way as is detailed in the last section on of the patch release tag. +#### For patch releases (e.g. v0.22.3) +You should already have checked out the existing minor release branch from step 1 (e.g. `v0.22-branch`). +1. Push the minor release branch to your fork (`git push -u origin `). +2. Add a patch release tag (e.g `v0.22.1`) by running `git tag `. + > This is important. If you don't have a tag, then the wheels you build will be **dev wheels**, which we can't + > push. The release process will automatically produce a tag for you via Semantic Release. +3. Push tags to your **origin branch** (not the upstream feast-dev/feast branch) with `git push origin `. +4. Kick off `build_wheels` workflow in your fork in the same way as is detailed in the last section, running the + workflow from this tag you just pushed up. -### Release for Python and Java SDK +### 3. Release for Python and Java SDK 1. Generate a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) or retrieve your saved personal access token. * The personal access token should have all of the permissions under the `repo` checkbox. 2. Access the `Actions` tab on the main `feast-dev/feast` repo and find the `release` action. @@ -28,15 +54,31 @@ For Feast maintainers, these are the concrete steps for making a new release. * If you are making a minor or major release, you should run it off of the master branch. * If you are making a patch release, run it off of the corresponding minor release branch. 4. 
Try the dry run first with your personal access token. If this succeeds, uncheck `Dry Run` and run the release workflow. -5. All of the jobs should succeed besides the UI job which needs to be released separately. Ping a maintainer on Slack to run the UI release manually. -6. Try to install the feast release in your local environment and test out the `feast init` -> `feast apply` workflow to verify as a sanity check that the release worked correctly. +5. Then try running normally (without dry run). + - First, the `release` workflow will kick off. This publishes an NPM package for the Web UI ([NPM package](http://npmjs.com/package/@feast-dev/feast-ui)), + bumps files versions (e.g. helm chart, UI, Java pom.xml files), and generate a changelog using Semantic Release. + All jobs should succeed. + - Second, the `publish` workflow will kick off. This builds all the Python wheels ([PyPI link](https://pypi.org/project/feast/), + publishes helm charts, publishes the Python and Java feature servers to Docker ([DockerHub images](https://hub.docker.com/u/feastdev)), + publishes the Java Serving Client + Datatypes libraries to Maven ([Maven repo](https://mvnrepository.com/artifact/dev.feast)) +6. Try to install the Feast Python release in your local environment and test out the `feast init` -> `feast apply` + workflow to verify as a sanity check that the release worked correctly. +7. Verify the releases all show the new version: + - [NPM package](http://npmjs.com/package/@feast-dev/feast-ui) + - [PyPI link](https://pypi.org/project/feast/) + - [DockerHub images (Java + Python feature servers, feature transformation server)](https://hub.docker.com/u/feastdev) + - [Maven repo (feast-datatypes, feast-serving-client)](https://mvnrepository.com/artifact/dev.feast) + +### 4. (for minor releases) Post-release steps +#### 4a: Creating a new branch +Create a new branch based on master (i.e. v0.22-branch) and push to the main Feast repo. 
This will be where +cherry-picks go for future patch releases and where documentation will point. -### (for minor releases) Post-release steps -1. Create a new branch based on master (i.e. v0.22-branch) and push to the main Feast repo. This will be where cherry-picks go for future patch releases and where documentation will point. -2. Write a summary of the release in the GitHub release - 1. By default, Semantic Release will pull in messages from commits (features vs fixes, etc). But this is hard to digest still, so it helps to have a high level overview. +#### 4b: Adding a high level summary in the GitHub release notes +By default, Semantic Release will pull in messages from commits (features vs fixes, etc). But this is hard to digest, +so it helps to have a high level overview. See https://github.com/feast-dev/feast/releases for the releases. -### Update documentation +#### 4c: Update documentation In the Feast Gitbook (ask [Danny Chiao](https://tectonfeast.slack.com/team/U029405HFEU) in Slack for access): 1. Create a new space within the Feast collection @@ -56,4 +98,5 @@ In the Feast Gitbook (ask [Danny Chiao](https://tectonfeast.slack.com/team/U0294 5. Configure the default space to be your new branch and save ![](new_branch_part_5.png) -6. Verify on docs.feast.dev that this new space is the default (this may take a few minutes to propagate, and your browser cache may be caching the old branch as the default) \ No newline at end of file +6. 
Verify on [docs.feast.dev](http://docs.feast.dev) that this new space is the default (this may take a few minutes to + propagate, and your browser cache may be caching the old branch as the default) \ No newline at end of file From 42cc91c7c2d7bb44575efb654f29ed46b3a8f750 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Wed, 10 Aug 2022 13:47:53 -0700 Subject: [PATCH 04/15] docs: Merge the feature repo docs pages (#3061) * docs: Merge the feature repo docs pages Signed-off-by: Achal Shah * delete Signed-off-by: Achal Shah Signed-off-by: Achal Shah --- docs/SUMMARY.md | 1 - .../architecture-and-components/README.md | 4 --- .../feature-repository.md | 27 ------------------- docs/getting-started/concepts/feature-repo.md | 15 ++++++++--- 4 files changed, 12 insertions(+), 35 deletions(-) delete mode 100644 docs/getting-started/architecture-and-components/feature-repository.md diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index aa95d40368..fa1a49bc1f 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -19,7 +19,6 @@ * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) * [Architecture](getting-started/architecture-and-components/README.md) * [Overview](getting-started/architecture-and-components/overview.md) - * [Feature repository](getting-started/architecture-and-components/feature-repository.md) * [Registry](getting-started/architecture-and-components/registry.md) * [Offline store](getting-started/architecture-and-components/offline-store.md) * [Online store](getting-started/architecture-and-components/online-store.md) diff --git a/docs/getting-started/architecture-and-components/README.md b/docs/getting-started/architecture-and-components/README.md index 6e6b5f6ee2..a67761b2fc 100644 --- a/docs/getting-started/architecture-and-components/README.md +++ b/docs/getting-started/architecture-and-components/README.md @@ -4,10 +4,6 @@ [overview.md](overview.md) {% endcontent-ref %} -{% content-ref url="feature-repository.md" %} 
-[feature-repository.md](feature-repository.md) -{% endcontent-ref %} - {% content-ref url="registry.md" %} [registry.md](registry.md) {% endcontent-ref %} diff --git a/docs/getting-started/architecture-and-components/feature-repository.md b/docs/getting-started/architecture-and-components/feature-repository.md deleted file mode 100644 index d231600eb8..0000000000 --- a/docs/getting-started/architecture-and-components/feature-repository.md +++ /dev/null @@ -1,27 +0,0 @@ -# Feature repository - -Feast users use Feast to manage two important sets of configuration: - -* Configuration about how to run Feast on your infrastructure -* Feature definitions - -With Feast, the above configuration can be written declaratively and stored as code in a central location. This central location is called a feature repository. The feature repository is the declarative source of truth for what the desired state of a feature store should be. - -The Feast CLI uses the feature repository to configure, deploy, and manage your feature store. - -An example structure of a feature repository is shown below: - -```text -$ tree -a -. -├── data -│ └── driver_stats.parquet -├── driver_features.py -├── feature_store.yaml -└── .feastignore - -1 directory, 4 files -``` - -For more details, see the [Feature repository](../../reference/feature-repository/) reference. - diff --git a/docs/getting-started/concepts/feature-repo.md b/docs/getting-started/concepts/feature-repo.md index 0316019bea..a56f42ce10 100644 --- a/docs/getting-started/concepts/feature-repo.md +++ b/docs/getting-started/concepts/feature-repo.md @@ -1,13 +1,22 @@ # Feature Repository -## Feature Repo +## Feature repository +Feast users use Feast to manage two important sets of configuration: + +* Configuration about how to run Feast on your infrastructure +* Feature definitions + +With Feast, the above configuration can be written declaratively and stored as code in a central location. 
This central location is called a feature repository. The feature repository is the declarative source of truth for what the desired state of a feature store should be. A feature repository is the collection of python files that define entities, feature views and data sources. Feature Repos also have a `feature_store.yaml` file at their root. -Users can collaborate by making and reviewing changes to Feast object definitions (feature views, entities, etc) in the feature repo. +Users can collaborate by making and reviewing changes to Feast object definitions (feature views, entities, etc.) in the feature repo. But, these objects must be applied, either through API, or the CLI, for them to be available by downstream Feast actions (such as materialization, or retrieving online features). Internally, Feast only looks at the registry when performing these actions, and not at the feature repo directly. ## Declarative Feature Definitions When using the CLI to apply changes (via `feast apply`), the CLI determines the state of the feature repo from the source files and updates the registry state to reflect the definitions in the feature repo files. -This means that new feature views are added to the registry, existing feature views are updated as necessary, and Feast objects removed from the source files are deleted from the registry. \ No newline at end of file +This means that new feature views are added to the registry, existing feature views are updated as necessary, and Feast objects removed from the source files are deleted from the registry. + +For more details, see the [Feature repository](../../reference/feature-repository/) reference. 
+ From a32d2475b1d47f24f2698a15ff596407a2035f17 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Wed, 10 Aug 2022 16:43:58 -0400 Subject: [PATCH 05/15] fix: Fix on demand feature view output in feast plan + Web UI crash (#3057) * fix: Fix on demand feature view output in feast plan + Web UI crash with ODFV Signed-off-by: Danny Chiao * lint Signed-off-by: Danny Chiao * fix tests Signed-off-by: Danny Chiao Signed-off-by: Danny Chiao --- protos/feast/core/OnDemandFeatureView.proto | 3 ++ sdk/python/feast/diff/registry_diff.py | 39 ++++++++++++++----- sdk/python/feast/on_demand_feature_view.py | 17 +++++--- sdk/python/feast/registry.py | 8 ++-- .../feature_repos/universal/feature_views.py | 2 + .../tests/unit/test_on_demand_feature_view.py | 4 ++ 6 files changed, 55 insertions(+), 18 deletions(-) diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index 33c51f5c4d..50bf8b6f55 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -83,4 +83,7 @@ message UserDefinedFunction { // The python-syntax function body (serialized by dill) bytes body = 2; + + // The string representation of the udf + string body_text = 3; } diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index fc0acf0223..56d5b84c71 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -17,6 +17,7 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec from feast.protos.feast.core.RequestFeatureView_pb2 import ( RequestFeatureView as RequestFeatureViewProto, ) @@ -137,19 +138,39 @@ def diff_registry_objects( else: current_spec = current_proto.spec new_spec = new_proto.spec - if current_spec != new_spec: + if current != new: for _field in current_spec.DESCRIPTOR.fields: if 
_field.name in FIELDS_TO_IGNORE: continue - if getattr(current_spec, _field.name) != getattr(new_spec, _field.name): - transition = TransitionType.UPDATE - property_diffs.append( - PropertyDiff( - _field.name, - getattr(current_spec, _field.name), - getattr(new_spec, _field.name), + elif getattr(current_spec, _field.name) != getattr(new_spec, _field.name): + if _field.name == "user_defined_function": + current_spec = cast(OnDemandFeatureViewSpec, current_proto) + new_spec = cast(OnDemandFeatureViewSpec, new_proto) + current_udf = current_spec.user_defined_function + new_udf = new_spec.user_defined_function + for _udf_field in current_udf.DESCRIPTOR.fields: + if _udf_field.name == "body": + continue + if getattr(current_udf, _udf_field.name) != getattr( + new_udf, _udf_field.name + ): + transition = TransitionType.UPDATE + property_diffs.append( + PropertyDiff( + _field.name + "." + _udf_field.name, + getattr(current_udf, _udf_field.name), + getattr(new_udf, _udf_field.name), + ) + ) + else: + transition = TransitionType.UPDATE + property_diffs.append( + PropertyDiff( + _field.name, + getattr(current_spec, _field.name), + getattr(new_spec, _field.name), + ) ) - ) return FeastObjectDiff( name=new_spec.name, feast_object_type=object_type, diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index bb45dd6eb6..fa5e9245fe 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -61,12 +61,12 @@ class OnDemandFeatureView(BaseFeatureView): maintainer. 
""" - # TODO(adchia): remove inputs from proto and declaration name: str features: List[Field] source_feature_view_projections: Dict[str, FeatureViewProjection] source_request_sources: Dict[str, RequestSource] udf: FunctionType + udf_string: str description: str tags: Dict[str, str] owner: str @@ -81,6 +81,7 @@ def __init__( # noqa: C901 List[Any] ] = None, # Typed as Any because @typechecked can't deal with the List[Union] udf: Optional[FunctionType] = None, + udf_string: str = "", inputs: Optional[ Dict[str, Union[FeatureView, FeatureViewProjection, RequestSource]] ] = None, @@ -99,8 +100,9 @@ def __init__( # noqa: C901 sources (optional): A map from input source names to the actual input sources, which may be feature views, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. - udf (optional): The user defined transformation function, which must take pandas + udf: The user defined transformation function, which must take pandas dataframes as inputs. + udf_string: The source code version of the udf (for diffing and displaying in Web UI) inputs (optional): (Deprecated) A map from input source names to the actual input sources, which may be feature views, feature view projections, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. 
@@ -233,9 +235,8 @@ def __init__( # noqa: C901 odfv_source.name ] = odfv_source.projection - if _udf is None: - raise ValueError("The `udf` parameter must be specified.") - self.udf = _udf # type: ignore + self.udf = udf # type: ignore + self.udf_string = udf_string @property def proto_class(self) -> Type[OnDemandFeatureViewProto]: @@ -249,6 +250,7 @@ def __copy__(self): sources=list(self.source_feature_view_projections.values()) + list(self.source_request_sources.values()), udf=self.udf, + udf_string=self.udf_string, description=self.description, tags=self.tags, owner=self.owner, @@ -269,6 +271,7 @@ def __eq__(self, other): self.source_feature_view_projections != other.source_feature_view_projections or self.source_request_sources != other.source_request_sources + or self.udf_string != other.udf_string or self.udf.__code__.co_code != other.udf.__code__.co_code ): return False @@ -310,6 +313,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: user_defined_function=UserDefinedFunctionProto( name=self.udf.__name__, body=dill.dumps(self.udf, recurse=True), + body_text=self.udf_string, ), description=self.description, tags=self.tags, @@ -362,6 +366,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): udf=dill.loads( on_demand_feature_view_proto.spec.user_defined_function.body ), + udf_string=on_demand_feature_view_proto.spec.user_defined_function.body_text, description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, @@ -651,6 +656,7 @@ def mainify(obj): obj.__module__ = "__main__" def decorator(user_function): + udf_string = dill.source.getsource(user_function) mainify(user_function) on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, @@ -660,6 +666,7 @@ def decorator(user_function): description=description, tags=tags, owner=owner, + udf_string=udf_string, ) functools.update_wrapper( 
wrapper=on_demand_feature_view_obj, wrapped=user_function diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index 336bb2429f..b6613f467e 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -24,7 +24,6 @@ from typing import Any, Dict, List, Optional from urllib.parse import urlparse -import dill from google.protobuf.internal.containers import RepeatedCompositeFieldContainer from google.protobuf.json_format import MessageToJson from proto import Message @@ -732,9 +731,10 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: key=lambda on_demand_feature_view: on_demand_feature_view.name, ): odfv_dict = self._message_to_sorted_dict(on_demand_feature_view.to_proto()) - odfv_dict["spec"]["userDefinedFunction"]["body"] = dill.source.getsource( - on_demand_feature_view.udf - ) + + odfv_dict["spec"]["userDefinedFunction"][ + "body" + ] = on_demand_feature_view.udf_string registry_dict["onDemandFeatureViews"].append(odfv_dict) for request_feature_view in sorted( self.list_request_feature_views(project=project), diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index b6e9aa8fc0..ea5f0e6ce4 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -88,6 +88,7 @@ def conv_rate_plus_100_feature_view( schema=[] if infer_features else _features, sources=sources, udf=conv_rate_plus_100, + udf_string="raw udf source", ) @@ -125,6 +126,7 @@ def similarity_feature_view( sources=sources, schema=[] if infer_features else _fields, udf=similarity, + udf_string="similarity raw udf", ) diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index 5a0f5c98d8..239eb603f0 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ 
b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -57,6 +57,7 @@ def test_hash(): Field(name="output2", dtype=Float32), ], udf=udf1, + udf_string="udf1 source code", ) on_demand_feature_view_2 = OnDemandFeatureView( name="my-on-demand-feature-view", @@ -66,6 +67,7 @@ def test_hash(): Field(name="output2", dtype=Float32), ], udf=udf1, + udf_string="udf1 source code", ) on_demand_feature_view_3 = OnDemandFeatureView( name="my-on-demand-feature-view", @@ -75,6 +77,7 @@ def test_hash(): Field(name="output2", dtype=Float32), ], udf=udf2, + udf_string="udf2 source code", ) on_demand_feature_view_4 = OnDemandFeatureView( name="my-on-demand-feature-view", @@ -84,6 +87,7 @@ def test_hash(): Field(name="output2", dtype=Float32), ], udf=udf2, + udf_string="udf2 source code", description="test", ) From a49f70c1528acabf3429ee211212fce265454aea Mon Sep 17 00:00:00 2001 From: Niklas von Maltzahn Date: Thu, 11 Aug 2022 18:57:32 +0200 Subject: [PATCH 06/15] fix: Fix Spark offline store type conversion to arrow (#3071) * Fix unit tests related to empty list types Signed-off-by: niklasvm * formatting Signed-off-by: niklasvm Signed-off-by: niklasvm --- .../offline_stores/contrib/spark_offline_store/spark.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 2437714dec..6e56e04fcd 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -1,3 +1,4 @@ +import tempfile import warnings from datetime import datetime from typing import Dict, List, Optional, Tuple, Union @@ -6,6 +7,7 @@ import pandas import pandas as pd import pyarrow +import pyarrow.parquet as pq import pyspark from pydantic import StrictStr from pyspark import SparkConf @@ -267,8 +269,11 @@ def _to_df_internal(self) -> 
pd.DataFrame: def _to_arrow_internal(self) -> pyarrow.Table: """Return dataset as pyarrow Table synchronously""" - df = self.to_df() - return pyarrow.Table.from_pandas(df) # noqa + + # write to temp parquet and then load it as pyarrow table from disk + with tempfile.TemporaryDirectory() as temp_dir: + self.to_spark_df().write.parquet(temp_dir, mode="overwrite") + return pq.read_table(temp_dir) def persist(self, storage: SavedDatasetStorage): """ From 0ff0ec473a03e54fdf950b4476b9312b27629230 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Thu, 11 Aug 2022 19:15:26 -0400 Subject: [PATCH 07/15] fix: Fix incorrect on demand feature view diffing and improve Java tests (#3074) * fix: Fix ODFV bug Signed-off-by: Danny Chiao --- .github/workflows/java_master_only.yml | 88 +++++++++++++++++- .github/workflows/java_pr.yml | 86 +++++++++++++++++ java/CONTRIBUTING.md | 1 + java/serving/README.md | 2 + java/serving/pom.xml | 22 +++++ .../docker-compose/feast10/definitions.py | 8 +- .../docker-compose/feast10/registry.db | Bin 14203 -> 0 bytes .../docker-compose/feast10/setup_it.py | 86 +++++++++++++++++ sdk/python/feast/diff/registry_diff.py | 4 +- .../tests/unit/diff/test_registry_diff.py | 56 +++++++++++ 10 files changed, 344 insertions(+), 9 deletions(-) delete mode 100644 java/serving/src/test/resources/docker-compose/feast10/registry.db create mode 100644 java/serving/src/test/resources/docker-compose/feast10/setup_it.py diff --git a/.github/workflows/java_master_only.yml b/.github/workflows/java_master_only.yml index f5297615f6..194024a168 100644 --- a/.github/workflows/java_master_only.yml +++ b/.github/workflows/java_master_only.yml @@ -69,6 +69,52 @@ jobs: java-version: '11' java-package: jdk architecture: x64 + - name: Setup Python (to call feast apply) + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: 3.8 + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.18.0 + - name: Upgrade pip 
version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install Python dependencies + run: make install-python-ci-dependencies + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-it-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-it-maven- - uses: actions/cache@v2 with: path: ~/.m2/repository @@ -95,10 +141,46 @@ jobs: java-version: '11' java-package: jdk architecture: x64 - - uses: actions/setup-python@v2 + - name: Setup Python (to call feast apply) + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: 3.8 + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 with: - python-version: '3.8' - architecture: 'x64' + go-version: 1.18.0 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 
+ with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install Python dependencies + run: make install-python-ci-dependencies - uses: actions/cache@v2 with: path: ~/.m2/repository diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index 328a8e7c7b..c552428664 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -40,6 +40,52 @@ jobs: java-version: '11' java-package: jdk architecture: x64 + - name: Setup Python (to call feast apply) + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: 3.8 + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.18.0 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + 
restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install Python dependencies + run: make install-python-ci-dependencies + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-it-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-it-maven- - uses: actions/cache@v2 with: path: ~/.m2/repository @@ -129,6 +175,46 @@ jobs: aws-region: us-west-2 - name: Use AWS CLI run: aws sts get-caller-identity + - name: Setup Python (to call feast apply) + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: 3.8 + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.18.0 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates 
lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install Python dependencies + run: make install-python-ci-dependencies - name: Run integration tests run: make test-java-integration - name: Save report diff --git a/java/CONTRIBUTING.md b/java/CONTRIBUTING.md index 74549034b9..7ccfe108c0 100644 --- a/java/CONTRIBUTING.md +++ b/java/CONTRIBUTING.md @@ -59,6 +59,7 @@ mvn spotless:apply ### Project Makefile The Project Makefile provides useful shorthands for common development tasks: +> Note: These commands rely on a local version of `feast` (Python) to be installed Run all Unit tests: ``` diff --git a/java/serving/README.md b/java/serving/README.md index a0d87563a9..0a153ceab8 100644 --- a/java/serving/README.md +++ b/java/serving/README.md @@ -136,4 +136,6 @@ Unit & Integration Tests can be used to verify functionality: mvn test -pl serving --also-make # run integration tests mvn verify -pl serving --also-make +# run integration tests with debugger +mvn -Dmaven.failsafe.debug verify -pl serving --also-make ``` \ No newline at end of file diff --git a/java/serving/pom.xml b/java/serving/pom.xml index e597775f9b..9eea11ef96 100644 --- a/java/serving/pom.xml +++ b/java/serving/pom.xml @@ -82,6 +82,28 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + + python + src/test/resources/docker-compose/feast10/ + + setup_it.py + + + feast_test_apply + process-test-resources + + exec + + + + diff --git a/java/serving/src/test/resources/docker-compose/feast10/definitions.py b/java/serving/src/test/resources/docker-compose/feast10/definitions.py index 806995ec06..0693358a12 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/definitions.py +++ 
b/java/serving/src/test/resources/docker-compose/feast10/definitions.py @@ -73,8 +73,9 @@ def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: entity = Entity(name="entity", value_type=ValueType.STRING,) -benchmark_feature_views = [ - FeatureView( +benchmark_feature_views = [] +for i in range(25): + fv = FeatureView( name=f"feature_view_{i}", entities=[entity], ttl=Duration(seconds=86400), @@ -82,8 +83,7 @@ def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: online=True, source=generated_data_source, ) - for i in range(25) -] + benchmark_feature_views.append(fv) benchmark_feature_service = FeatureService( name=f"benchmark_feature_service", features=benchmark_feature_views, diff --git a/java/serving/src/test/resources/docker-compose/feast10/registry.db b/java/serving/src/test/resources/docker-compose/feast10/registry.db deleted file mode 100644 index 746934e3d0a09c348f8046087fd6144d0d237d61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 14203 zcmc(mUx-|18OCQeyUFI{-!iGK*(+V5w1Mo*`R0G{LX1{4kPC}9y*ZwpbEZ4(?#w!K zc6FhFdZ8{dpx6l%1xYE^_Cg4SLX8vyRxGs8N-0uGAyq5sl@U=YTJU?{-A%sd$vH3= z!bxD6_xqkR^UQB&=R41PzT-7K;~kEMy{&i{^rD5?#l7NFy=dvq_*aj0<8YJ&8^gh) zaVIGiyw7-__72AVq?c?j%r5Ocx?6U}`>b~;2%^pP_3ee3C2#MceW|;&i$#Cwbl3OW z?Pho7Y^xbH&el5h#@Urneyp@we%!8yCTg8II{)rZfAzzI3s3#_z2}SOgQE9V(L1^K z9##jN!?o>Tl!VEsgs%7Bntk!5W8UG;puZIi!z5mqTRQ5^hn>!Db#UpVcOu++B#6SE zeWqmC+ZZj(n%Vg${(1V`>U?qWM7(9+c#!ngWBW4e8)pu6hOvE~=;3=y_w3ow*I(WU zhu_?clOGg)#kbt+cZcEfpxa&R^<(RCFpNjb-QJpgY;AYqLI7SHJ&{ zbd0qrFxCiTjWE^-V~sG@2xE;f)(B&bFxCiTEjNreKmIr!V|@yYb;4LDjCI0TCyaH% zSSO5i!dNGab;4NB4da`C+3BP%F^wrOHV9*bFg6HdgD^G-V}md@2xEgVHV9)QH;g;) zKADcOIR(ZhVQdn{CShz6#wKBG62>NBY!b#MVQl7x@kj6fEFEKO3XCnn*dmNA!q_5= zEyCC$j4i_0B8)A<*vbv#&3`?gjH)gz-XS@^pXVscr{LJuj!aK6B^eX}q281LG2uc_bmM|bNVL)iYfZ&7y;Ryo* z6b5o5y7PzY8ALM^jcZ&Z4Gb_*pNaZR)MuhT6ZM&>&qRGD>gPuE&C9Q65X}rXE;UU~1&8ALN9k84~r4-7C-O+BDsqMCa^!9+FrfP#r?_5nq1L~p(Nw+y10 z>Blv$@dpN&s0JWVFj0*_pkSgJfKOl 
zeE9neqSIt1!&P+xx z6@h|@>ct{ZFj2i+1PUga-vy(Wets)D_q)^H_0!&|JJ;*?yLF>@xYg^#+#h|mc=xsi z*q>mxI^5o_V82-1(k@hPX_qOtv~bUi2!Mdegp4`$xbxXT6Ena1#e#yP|Zd;L1 zi&b}Cyt=iRb!&0!)?(MK#jjh7VYe2?Zk=k`xDxr6g_C3?d=S(e}p^JH0KH_WG6 zZn+Z4w@{O1Nw+XhmL=Q5JXw}x3-e@IaxKiKT5h`%36M~eWr>e4PnIP-!aP})=m_&< zS%M?Xi{)Pz@4npStm%4Ps{{k$8V1BR42W+S5aTc)&S5~T!+>~)0Wl8)>F#|HNgl!h zb}xAd73^N}5GvTc|U}* z$|t7^N9h9Tx|j2ORIqzF&qoEjm-Bp7uzNYrM+Li=^ZXR|H8@JwO4q%dQ=@|2%Q-bF z*u9)nqk`SbIW;QSy_{30xUa)ex@@}cWzQ2T*uCs}LIt~*eNU)h_p) zT|TH__p;9i73^Mi`lPsT!BM)dy6$CT4Jz2ZY^^~ByO+&1s9^W9y#^KRUN+dIxNpNz zy2QHfWj_ll*uCs%K?S>)JuRqU_p+-673^O2wcPFAwn-hc+7HD6alWLBZ}doq~ehYd!^qbzdrc;lFdVXF-qZ6X?}(L*f@BbcQB-P_YV(JF zTrA_LZKqh#p2nJ5@Q&W81N(Qi^o{gei@vh><@5=>v#11j7M0TOEIRYSfh-2E9e85) z^7Udt|Bbz@tAf}4UB$R$zbF!IkpYZEJB< z-q;S;;%}Gt?m`=tZQpT}lphVZ!m_p=`;U}I!%jJ|_hpsstyOv>)uHS={e)7&QivI(E%Qx2m diff --git a/java/serving/src/test/resources/docker-compose/feast10/setup_it.py b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py new file mode 100644 index 0000000000..733ebdfb49 --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py @@ -0,0 +1,86 @@ +from pathlib import Path +from feast.repo_config import load_repo_config +from datetime import datetime, timedelta + +import numpy as np +import pandas as pd + +from definitions import ( + benchmark_feature_service, + benchmark_feature_views, + driver, + driver_hourly_stats_view, + entity, + transformed_conv_rate, +) + +from feast import FeatureStore + + +def setup_data(): + start = datetime.now() - timedelta(days=10) + + df = pd.DataFrame() + df["driver_id"] = np.arange(1000, 1010) + df["created"] = datetime.now() + df["conv_rate"] = np.arange(0, 1, 0.1) + df["acc_rate"] = np.arange(0.5, 1, 0.05) + df["avg_daily_trips"] = np.arange(0, 1000, 100) + + # some of rows are beyond 7 days to test OUTSIDE_MAX_AGE status + df["event_timestamp"] = start + pd.Series(np.arange(0, 10)).map( + lambda days: timedelta(days=days) + ) + + # Store data in parquet 
files. Parquet is convenient for local development mode. For + # production, you can use your favorite DWH, such as BigQuery. See Feast documentation + # for more info. + df.to_parquet("driver_stats.parquet") + + # For Benchmarks + # Please read more in Feast RFC-031 + # (link https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit) + # about this benchmark setup + def generate_data( + num_rows: int, num_features: int, destination: str + ) -> pd.DataFrame: + features = [f"feature_{i}" for i in range(num_features)] + columns = ["entity", "event_timestamp"] + features + df = pd.DataFrame(0, index=np.arange(num_rows), columns=columns) + df["event_timestamp"] = datetime.utcnow() + for column in features: + df[column] = np.random.randint(1, num_rows, num_rows) + + df["entity"] = "key-" + pd.Series(np.arange(1, num_rows + 1)).astype( + pd.StringDtype() + ) + + df.to_parquet(destination) + + generate_data(10**3, 250, "benchmark_data.parquet") + + +def main(): + print("Running setup_it.py") + + setup_data() + existing_repo_config = load_repo_config(Path(".")) + + # Update to default online store since otherwise, relies on Dockerized Redis service + fs = FeatureStore(config=existing_repo_config.copy(update={"online_store": {}})) + fs.apply( + [ + driver_hourly_stats_view, + transformed_conv_rate, + driver, + entity, + benchmark_feature_service, + *benchmark_feature_views, + ] + ) + + print("setup_it finished") + + +if __name__ == "__main__": + main() diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 56d5b84c71..37c8af9155 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -144,8 +144,8 @@ def diff_registry_objects( continue elif getattr(current_spec, _field.name) != getattr(new_spec, _field.name): if _field.name == "user_defined_function": - current_spec = cast(OnDemandFeatureViewSpec, current_proto) - new_spec = cast(OnDemandFeatureViewSpec, 
new_proto) + current_spec = cast(OnDemandFeatureViewSpec, current_spec) + new_spec = cast(OnDemandFeatureViewSpec, new_spec) current_udf = current_spec.user_defined_function new_udf = new_spec.user_defined_function for _udf_field in current_udf.DESCRIPTOR.fields: diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index 0effdfba97..924416c202 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -1,9 +1,14 @@ +import pandas as pd + +from feast import Field from feast.diff.registry_diff import ( diff_registry_objects, tag_objects_for_keep_delete_update_add, ) from feast.entity import Entity from feast.feature_view import FeatureView +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import String from tests.utils.data_source_test_creator import prep_file_source @@ -96,3 +101,54 @@ def test_diff_registry_objects_feature_views(simple_dataset_1): assert feast_object_diffs.feast_object_property_diffs[0].val_declared == { "when": "after" } + + +def test_diff_odfv(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity = Entity(name="id", join_keys=["id"]) + fv = FeatureView( + name="fv2", + entities=[entity], + source=file_source, + tags={"when": "before"}, + ) + + @on_demand_feature_view( + sources=[fv], + schema=[Field(name="first_char", dtype=String)], + ) + def pre_changed(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["first_char"] = inputs["string_col"].str[:1].astype("string") + return df + + @on_demand_feature_view( + sources=[fv], + schema=[Field(name="first_char", dtype=String)], + ) + def post_changed(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["first_char"] = inputs["string_col"].str[:1].astype("string") + "hi" + return df + + feast_object_diffs = diff_registry_objects( + pre_changed, pre_changed, "on 
demand feature view" + ) + assert len(feast_object_diffs.feast_object_property_diffs) == 0 + + feast_object_diffs = diff_registry_objects( + pre_changed, post_changed, "on demand feature view" + ) + + # Note that user_defined_function.body is excluded because it always changes (dill is non-deterministic), even + # if no code is changed + assert len(feast_object_diffs.feast_object_property_diffs) == 3 + assert feast_object_diffs.feast_object_property_diffs[0].property_name == "name" + assert ( + feast_object_diffs.feast_object_property_diffs[1].property_name + == "user_defined_function.name" + ) + assert ( + feast_object_diffs.feast_object_property_diffs[2].property_name + == "user_defined_function.body_text" + ) From 9778b543a71141405ca9573f03c121e9120d8469 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Thu, 11 Aug 2022 19:32:23 -0400 Subject: [PATCH 08/15] ci: Fix improper workflow setup. Exclude test resource phase when Java integration tests are skipped Signed-off-by: Danny Chiao --- .github/workflows/java_master_only.yml | 42 +------------------ .github/workflows/java_pr.yml | 42 +------------------ java/infra/docker/feature-server/Dockerfile | 4 +- java/serving/pom.xml | 1 + .../docker-compose/feast10/setup_it.py | 4 +- 5 files changed, 9 insertions(+), 84 deletions(-) diff --git a/.github/workflows/java_master_only.yml b/.github/workflows/java_master_only.yml index 194024a168..c3548991bb 100644 --- a/.github/workflows/java_master_only.yml +++ b/.github/workflows/java_master_only.yml @@ -69,46 +69,6 @@ jobs: java-version: '11' java-package: jdk architecture: x64 - - name: Setup Python (to call feast apply) - uses: actions/setup-python@v2 - id: setup-python - with: - python-version: 3.8 - architecture: x64 - - name: Setup Go - id: setup-go - uses: actions/setup-go@v2 - with: - go-version: 1.18.0 - - name: Upgrade pip version - run: | - pip install --upgrade "pip>=21.3.1,<22.1" - - name: Get pip cache dir - id: pip-cache - run: | - echo "::set-output 
name=dir::$(pip cache dir)" - - name: pip cache - uses: actions/cache@v2 - with: - path: | - ${{ steps.pip-cache.outputs.dir }} - /opt/hostedtoolcache/Python - /Users/runner/hostedtoolcache/Python - key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} - restore-keys: | - ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - - name: Install pip-tools - run: pip install pip-tools - - name: Install apache-arrow on ubuntu - run: | - sudo apt update - sudo apt install -y -V ca-certificates lsb-release wget - wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt update - sudo apt install -y -V libarrow-dev - - name: Install Python dependencies - run: make install-python-ci-dependencies - uses: actions/cache@v2 with: path: ~/.m2/repository @@ -131,6 +91,8 @@ jobs: integration-test: if: github.repository == 'feast-dev/feast' runs-on: ubuntu-latest + env: + PYTHON: 3.8 steps: - uses: actions/checkout@v2 with: diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index c552428664..72f419e409 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -40,46 +40,6 @@ jobs: java-version: '11' java-package: jdk architecture: x64 - - name: Setup Python (to call feast apply) - uses: actions/setup-python@v2 - id: setup-python - with: - python-version: 3.8 - architecture: x64 - - name: Setup Go - id: setup-go - uses: actions/setup-go@v2 - with: - go-version: 1.18.0 - - name: Upgrade pip version - run: | - pip install --upgrade "pip>=21.3.1,<22.1" - - name: Get pip cache dir - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - name: pip cache - uses: actions/cache@v2 - with: - path: | - ${{ 
steps.pip-cache.outputs.dir }} - /opt/hostedtoolcache/Python - /Users/runner/hostedtoolcache/Python - key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} - restore-keys: | - ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - - name: Install pip-tools - run: pip install pip-tools - - name: Install apache-arrow on ubuntu - run: | - sudo apt update - sudo apt install -y -V ca-certificates lsb-release wget - wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt update - sudo apt install -y -V libarrow-dev - - name: Install Python dependencies - run: make install-python-ci-dependencies - uses: actions/cache@v2 with: path: ~/.m2/repository @@ -135,6 +95,8 @@ jobs: github.repository == 'feast-dev/feast' runs-on: ubuntu-latest needs: unit-test-java + env: + PYTHON: 3.8 steps: - uses: actions/checkout@v2 with: diff --git a/java/infra/docker/feature-server/Dockerfile b/java/infra/docker/feature-server/Dockerfile index 5cd0e6e37b..525539f52d 100644 --- a/java/infra/docker/feature-server/Dockerfile +++ b/java/infra/docker/feature-server/Dockerfile @@ -16,7 +16,7 @@ COPY java/coverage/pom.xml coverage/pom.xml # user to optionally use cached repository when building the image by copying # the existing .m2 directory to $FEAST_REPO_ROOT/.m2 ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=3" -COPY java/pom.xml .m2/* .m2/ +#COPY java/pom.xml .m2/* .m2/ RUN mvn dependency:go-offline -DexcludeGroupIds:dev.feast 2>/dev/null || true COPY java/ . 
@@ -24,7 +24,7 @@ COPY protos/feast datatypes/src/main/proto/feast ARG VERSION=dev RUN mvn --also-make --projects serving -Drevision=$VERSION \ - -DskipUTs=true --batch-mode clean package + -DskipUTs=true -DskipITs=true --batch-mode clean package # # Download grpc_health_probe to run health check for Feast Serving # https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ diff --git a/java/serving/pom.xml b/java/serving/pom.xml index 9eea11ef96..8f0cf407e9 100644 --- a/java/serving/pom.xml +++ b/java/serving/pom.xml @@ -95,6 +95,7 @@ setup_it.py + ${skipITs} feast_test_apply process-test-resources diff --git a/java/serving/src/test/resources/docker-compose/feast10/setup_it.py b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py index 733ebdfb49..503b66f328 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/setup_it.py +++ b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py @@ -42,8 +42,8 @@ def setup_data(): # (link https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit) # about this benchmark setup def generate_data( - num_rows: int, num_features: int, destination: str - ) -> pd.DataFrame: + num_rows, num_features, destination + ): features = [f"feature_{i}" for i in range(num_features)] columns = ["entity", "event_timestamp"] + features df = pd.DataFrame(0, index=np.arange(num_rows), columns=columns) From 1451a82ff70903728a2ee0c4f567534a35ddf767 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Thu, 11 Aug 2022 19:48:18 -0400 Subject: [PATCH 09/15] ci: Fix accidental m2 exclusion Signed-off-by: Danny Chiao --- java/infra/docker/feature-server/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/java/infra/docker/feature-server/Dockerfile b/java/infra/docker/feature-server/Dockerfile index 525539f52d..a728340d6b 100644 --- a/java/infra/docker/feature-server/Dockerfile +++ b/java/infra/docker/feature-server/Dockerfile @@ -16,7 +16,7 @@ 
COPY java/coverage/pom.xml coverage/pom.xml # user to optionally use cached repository when building the image by copying # the existing .m2 directory to $FEAST_REPO_ROOT/.m2 ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=3" -#COPY java/pom.xml .m2/* .m2/ +COPY java/pom.xml .m2/* .m2/ RUN mvn dependency:go-offline -DexcludeGroupIds:dev.feast 2>/dev/null || true COPY java/ . From eb885b176cf0d211987cab852e49ba4af93185b3 Mon Sep 17 00:00:00 2001 From: sfc-gh-madkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 12 Aug 2022 11:48:29 -0500 Subject: [PATCH 10/15] fix: Fix field mapping logic during feature inference (#3067) fix: Fix bug where feature inference was improperly registering timestamp and created field if part of field_mapping Signed-off-by: Miles Adkins Signed-off-by: Miles Adkins --- sdk/python/feast/inference.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index 0b8e42b4e9..84e7a1373f 100644 --- a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -199,10 +199,10 @@ def _infer_features_and_entities( fv.batch_source.timestamp_field, fv.batch_source.created_timestamp_column, } - for column in columns_to_exclude: - if column in fv.batch_source.field_mapping: - columns_to_exclude.remove(column) - columns_to_exclude.add(fv.batch_source.field_mapping[column]) + for original_col, mapped_col in fv.batch_source.field_mapping.items(): + if mapped_col in columns_to_exclude: + columns_to_exclude.remove(mapped_col) + columns_to_exclude.add(original_col) table_column_names_and_types = fv.batch_source.get_table_column_names_and_types( config From 4d4a7c59a7202c600a263eb29d99e05f3343b283 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Fri, 12 Aug 2022 15:19:57 -0400 Subject: [PATCH 11/15] ci: Fix go unit tests 
failure (#3078) * ci: Fix go unit tests failure Signed-off-by: Danny Chiao * Temporary fix? Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang Signed-off-by: Danny Chiao Signed-off-by: Kevin Zhang Co-authored-by: Kevin Zhang --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 67be3ba248..4dbe2a7b4b 100644 --- a/Makefile +++ b/Makefile @@ -229,7 +229,7 @@ install-go-ci-dependencies: python -m pip install pybindgen==0.22.0 protobuf==3.20.1 install-protoc-dependencies: - pip install grpcio-tools==1.47.0 mypy-protobuf==3.1.0 + pip install --ignore-installed protobuf grpcio-tools==1.47.0 mypy-protobuf==3.1.0 compile-protos-go: install-go-proto-dependencies install-protoc-dependencies python setup.py build_go_protos @@ -242,7 +242,7 @@ install-feast-ci-locally: # Needs feast package to setup the feature store # CGO flag is due to this issue: https://github.com/golang/go/wiki/InvalidFlag -test-go: compile-protos-go compile-go-lib install-feast-ci-locally +test-go: compile-protos-go compile-protos-python compile-go-lib install-feast-ci-locally CGO_LDFLAGS_ALLOW=".*" go test -tags cgo,ccalloc ./... 
format-go: From 9d2e042c4e78fea890cc8e8daf67affa91b814a9 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Fri, 12 Aug 2022 13:50:56 -0700 Subject: [PATCH 12/15] docs: Codebase structure (#3050) * Codebase structure docs Signed-off-by: Felix Wang * Address code review Signed-off-by: Felix Wang Signed-off-by: Felix Wang --- CONTRIBUTING.md | 2 + docs/SUMMARY.md | 1 + docs/reference/codebase-structure.md | 131 +++++++++++++++++++++++++++ 3 files changed, 134 insertions(+) create mode 100644 docs/reference/codebase-structure.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 43ab6a58b8..2470028350 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -48,6 +48,8 @@ the main Feast repository: - [Feast Java Serving](#feast-java-serving) - [Feast Go Client](#feast-go-client) +Please see [this page](https://docs.feast.dev/reference/codebase-structure) for more details on the structure of the entire codebase. + ## Community See [Contribution process](https://docs.feast.dev/project/contributing) and [Community](https://docs.feast.dev/community) for details on how to get more involved in the community. diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index fa1a49bc1f..bdfe9555dd 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -59,6 +59,7 @@ ## Reference +* [Codebase Structure](reference/codebase-structure.md) * [Data sources](reference/data-sources/README.md) * [File](reference/data-sources/file.md) * [Snowflake](reference/data-sources/snowflake.md) diff --git a/docs/reference/codebase-structure.md b/docs/reference/codebase-structure.md new file mode 100644 index 0000000000..b75227860b --- /dev/null +++ b/docs/reference/codebase-structure.md @@ -0,0 +1,131 @@ +# Codebase structure + +Let's examine the Feast codebase. +This analysis is accurate as of Feast 0.23. + +``` +$ tree -L 1 -d +. +├── docs +├── examples +├── go +├── infra +├── java +├── protos +├── sdk +└── ui +``` + +## Python SDK + +The Python SDK lives in `sdk/python/feast`. 
+The majority of Feast logic lives in these Python files:
+* The core Feast objects ([entities](../getting-started/concepts/entity.md), [feature views](../getting-started/concepts/feature-view.md), [data sources](../getting-started/concepts/dataset.md), etc.) are defined in their respective Python files, such as `entity.py`, `feature_view.py`, and `data_source.py`.
+* The `FeatureStore` class is defined in `feature_store.py` and the associated configuration object (the Python representation of the `feature_store.yaml` file) is defined in `repo_config.py`.
+* The CLI and other core feature store logic are defined in `cli.py` and `repo_operations.py`.
+* The type system that is used to manage conversion between Feast types and external typing systems is managed in `type_map.py`.
+* The Python feature server (the server that is started through the `feast serve` command) is defined in `feature_server.py`.
+
+There are also several important submodules:
+* `infra/` contains all the infrastructure components, such as the provider, offline store, online store, batch materialization engine, and registry.
+* `dqm/` covers data quality monitoring, such as the dataset profiler.
+* `diff/` covers the logic for determining how to apply infrastructure changes upon feature repo changes (e.g. the output of `feast plan` and `feast apply`).
+* `embedded_go/` covers the Go feature server.
+* `ui/` contains the embedded Web UI, to be launched on the `feast ui` command.
+
+Of these submodules, `infra/` is the most important.
+It contains the interfaces for the [provider](../getting-started/architecture-and-components/provider.md), [offline store](../getting-started/architecture-and-components/offline-store.md), [online store](../getting-started/architecture-and-components/online-store.md), [batch materialization engine](../getting-started/architecture-and-components/batch-materialization-engine.md), and [registry](../getting-started/architecture-and-components/registry.md), as well as all of their individual implementations.
+
+```
+$ tree --dirsfirst -L 1 infra
+infra
+├── contrib
+├── feature_servers
+├── materialization
+├── offline_stores
+├── online_stores
+├── registry
+├── transformation_servers
+├── utils
+├── __init__.py
+├── aws.py
+├── gcp.py
+├── infra_object.py
+├── key_encoding_utils.py
+├── local.py
+├── passthrough_provider.py
+└── provider.py
+```
+
+The tests for the Python SDK are contained in `sdk/python/tests`.
+For more details, see this [overview](../how-to-guides/adding-or-reusing-tests.md#test-suite-overview) of the test suite.
+
+### Example flow: `feast apply`
+
+Let's walk through how `feast apply` works by tracking its execution across the codebase.
+
+1. All CLI commands are in `cli.py`.
+   Most of these commands are backed by methods in `repo_operations.py`.
+   The `feast apply` command triggers `apply_total_command`, which then calls `apply_total` in `repo_operations.py`.
+2. With a `FeatureStore` object (from `feature_store.py`) that is initialized based on the `feature_store.yaml` in the current working directory, `apply_total` first parses the feature repo with `parse_repo` and then calls either `FeatureStore.apply` or `FeatureStore._apply_diffs` to apply those changes to the feature store.
+3. Let's examine `FeatureStore.apply`.
+   It splits the objects based on class (e.g. `Entity`, `FeatureView`, etc.) and then calls the appropriate registry method to apply or delete the object.
+   For example, it might call `self._registry.apply_entity` to apply an entity.
+ If the default file-based registry is used, this logic can be found in `infra/registry/registry.py`. +4. Then the feature store must update its cloud infrastructure (e.g. online store tables) to match the new feature repo, so it calls `Provider.update_infra`, which can be found in `infra/provider.py`. +5. Assuming the provider is a built-in provider (e.g. one of the local, GCP, or AWS providers), it will call `PassthroughProvider.update_infra` in `infra/passthrough_provider.py`. +6. This delegates to the online store and batch materialization engine. + For example, if the feature store is configured to use the Redis online store then the `update` method from `infra/online_stores/redis.py` will be called. + And if the local materialization engine is configured then the `update` method from `infra/materialization/local_engine.py` will be called. + +At this point, the `feast apply` command is complete. + +### Example flow: `feast materialize` + +Let's walk through how `feast materialize` works by tracking its execution across the codebase. + +1. The `feast materialize` command triggers `materialize_command` in `cli.py`, which then calls `FeatureStore.materialize` from `feature_store.py`. +2. This then calls `Provider.materialize_single_feature_view`, which can be found in `infra/provider.py`. +3. As with `feast apply`, the provider is most likely backed by the passthrough provider, in which case `PassthroughProvider.materialize_single_feature_view` will be called. +4. This delegates to the underlying batch materialization engine. + Assuming that the local engine has been configured, `LocalMaterializationEngine.materialize` from `infra/materialization/local_engine.py` will be called. +5. Since materialization involves reading features from the offline store and writing them to the online store, the local engine will delegate to both the offline store and online store. 
+ Specifically, it will call `OfflineStore.pull_latest_from_table_or_query` and `OnlineStore.online_write_batch`. + These two calls will be routed to the offline store and online store that have been configured. + +### Example flow: `get_historical_features` + +Let's walk through how `get_historical_features` works by tracking its execution across the codebase. + +1. We start with `FeatureStore.get_historical_features` in `feature_store.py`. + This method does some internal preparation, and then delegates the actual execution to the underlying provider by calling `Provider.get_historical_features`, which can be found in `infra/provider.py`. +2. As with `feast apply`, the provider is most likely backed by the passthrough provider, in which case `PassthroughProvider.get_historical_features` will be called. +3. That call simply delegates to `OfflineStore.get_historical_features`. + So if the feature store is configured to use Snowflake as the offline store, `SnowflakeOfflineStore.get_historical_features` will be executed. + +## Java SDK + +The `java/` directory contains the Java serving component. +See [here](https://github.com/feast-dev/feast/blob/master/java/CONTRIBUTING.md) for more details on how the repo is structured. + +## Go feature server + +The `go/` directory contains the Go feature server. +Most of the files here have logic to help with reading features from the online store. +Within `go/`, the `internal/feast/` directory contains most of the core logic: +* `onlineserving/` covers the core serving logic. +* `model/` contains the implementations of the Feast objects (entity, feature view, etc.). + * For example, `entity.go` is the Go equivalent of `entity.py`. It contains a very simple Go implementation of the entity object. +* `registry/` covers the registry. + * Currently only the file-based registry supported (the sql-based registry is unsupported). 
Additionally, the file-based registry only supports a file-based registry store, not the GCS or S3 registry stores. +* `onlinestore/` covers the online stores (currently only Redis and SQLite are supported). + +## Protobufs + +Feast uses [protobuf](https://github.com/protocolbuffers/protobuf) to store serialized versions of the core Feast objects. +The protobuf definitions are stored in `protos/feast`. + +## Web UI + +The `ui/` directory contains the Web UI. +See [here](https://github.com/feast-dev/feast/blob/master/ui/CONTRIBUTING.md) for more details on the structure of the Web UI. From ac782bfdf344a1ea111b6541c87aebf5d843d464 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Fri, 12 Aug 2022 14:03:54 -0700 Subject: [PATCH 13/15] chore: Update offline and online store docs (#3048) * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang Signed-off-by: Kevin Zhang --- .../adding-a-new-offline-store.md | 32 ++++++++++++++++++- .../adding-support-for-a-new-online-store.md | 22 +++++++++---- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md b/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md index 91b23eaad5..b2818b748f 100644 --- a/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md +++ b/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md @@ -406,7 +406,37 @@ Even if you have created the `OfflineStore` class in a separate repo, you can st ``` If the integration tests fail, this indicates that there is a mistake in the implementation of this offline store! -5. Remember to add your datasource to `repo_config.py` similar to how we added `spark`, `trino`, etc, to the dictionary `OFFLINE_STORE_CLASS_FOR_TYPE` and add the necessary configuration to `repo_configuration.py`. Namely, `AVAILABLE_OFFLINE_STORES` should load your repo configuration module. + +5. 
Remember to add your datasource to `repo_config.py` similar to how we added `spark`, `trino`, etc, to the dictionary `OFFLINE_STORE_CLASS_FOR_TYPE`. This will allow Feast to load your class from the `feature_store.yaml`. + +6. Finally, add a Makefile target to the Makefile to run your datastore specific tests by setting the `FULL_REPO_CONFIGS_MODULE` and `PYTEST_PLUGINS` environment variable. The `PYTEST_PLUGINS` environment variable allows pytest to load in the `DataSourceCreator` for your datasource. You can remove certain tests that are not relevant or still do not work for your datastore using the `-k` option. + +{% code title="Makefile" %} +```Makefile +test-python-universal-spark: + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.spark_repo_configuration \ + PYTEST_PLUGINS=feast.infra.offline_stores.contrib.spark_offline_store.tests \ + FEAST_USAGE=False IS_TEST=True \ + python -m pytest -n 8 --integration \ + -k "not test_historical_retrieval_fails_on_validation and \ + not test_historical_retrieval_with_validation and \ + not test_historical_features_persisting and \ + not test_historical_retrieval_fails_on_validation and \ + not test_universal_cli and \ + not test_go_feature_server and \ + not test_feature_logging and \ + not test_reorder_columns and \ + not test_logged_features_validation and \ + not test_lambda_materialization_consistency and \ + not test_offline_write and \ + not test_push_features_to_offline_store.py and \ + not gcs_registry and \ + not s3_registry and \ + not test_universal_types" \ + sdk/python/tests +``` +{% endcode %} ### 7. 
Dependencies diff --git a/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md b/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md index fe16347b73..52f0897138 100644 --- a/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md +++ b/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md @@ -318,7 +318,7 @@ online_store: feast_custom_online_store.mysql.MySQLOnlineStore ## 4. Testing the OnlineStore class -### Integrating with the integration test suite and unit test suite. +### 4.1 Integrating with the integration test suite and unit test suite. Even if you have created the `OnlineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. @@ -352,7 +352,7 @@ If you are planning to start the online store up locally(e.g spin up a local Red } ``` -If you are planning instead to use a Dockerized container to run your tests against your online store, you can define a `OnlineStoreCreator` and replace the `None` object above with your `OnlineStoreCreator` class. +If you are planning instead to use a Dockerized container to run your tests against your online store, you can define a `OnlineStoreCreator` and replace the `None` object above with your `OnlineStoreCreator` class. You should make this class available to pytest through the `PYTEST_PLUGINS` environment variable. If you create a containerized docker image for testing, developers who are trying to test with your online store will not have to spin up their own instance of the online store for testing. An example of an `OnlineStoreCreator` is shown below: @@ -372,12 +372,20 @@ class RedisOnlineStoreCreator(OnlineStoreCreator): ``` {% endcode %} -3\. You should swap out the `FULL_REPO_CONFIGS` environment variable and run the integration tests against your online store. 
In the example repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_online_store/feast_tests.py`, so you would run: - -```bash -export FULL_REPO_CONFIGS_MODULE='feast_custom_online_store.feast_tests' -make test-python-universal +3\. Add a Makefile target to the Makefile to run your datastore specific tests by setting the `FULL_REPO_CONFIGS_MODULE` environment variable. Add `PYTEST_PLUGINS` if pytest is having trouble loading your `DataSourceCreator`. You can remove certain tests that are not relevant or still do not work for your datastore using the `-k` option. + +{% code title="Makefile" %} +```Makefile +test-python-universal-cassandra: + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.cassandra_repo_configuration \ + PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ + FEAST_USAGE=False \ + IS_TEST=True \ + python -m pytest -x --integration \ + sdk/python/tests ``` +{% endcode %} * If there are some tests that fail, this indicates that there is a mistake in the implementation of this online store! 
From bafa520b3275f6b3e9e9cc15f38102a916bb9bab Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Mon, 15 Aug 2022 11:25:26 -0400 Subject: [PATCH 14/15] chore: lint error Signed-off-by: Danny Chiao --- sdk/python/feast/on_demand_feature_view.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index fa5e9245fe..2b52c1761c 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -235,6 +235,10 @@ def __init__( # noqa: C901 odfv_source.name ] = odfv_source.projection + if _udf is None: + raise ValueError("The `udf` parameter must be specified.") + assert _udf + self.udf = udf # type: ignore self.udf_string = udf_string From 7d9248214a832f499dd902e8babee0a21e68c05b Mon Sep 17 00:00:00 2001 From: feast-ci-bot Date: Mon, 15 Aug 2022 15:45:49 +0000 Subject: [PATCH 15/15] chore(release): release 0.23.2 ## [0.23.2](https://github.com/feast-dev/feast/compare/v0.23.1...v0.23.2) (2022-08-15) ### Bug Fixes * Fix field mapping logic during feature inference ([#3067](https://github.com/feast-dev/feast/issues/3067)) ([eb885b1](https://github.com/feast-dev/feast/commit/eb885b176cf0d211987cab852e49ba4af93185b3)) * Fix incorrect on demand feature view diffing and improve Java tests ([#3074](https://github.com/feast-dev/feast/issues/3074)) ([0ff0ec4](https://github.com/feast-dev/feast/commit/0ff0ec473a03e54fdf950b4476b9312b27629230)) * Fix on demand feature view output in feast plan + Web UI crash ([#3057](https://github.com/feast-dev/feast/issues/3057)) ([a32d247](https://github.com/feast-dev/feast/commit/a32d2475b1d47f24f2698a15ff596407a2035f17)) * Fix Spark offline store type conversion to arrow ([#3071](https://github.com/feast-dev/feast/issues/3071)) ([a49f70c](https://github.com/feast-dev/feast/commit/a49f70c1528acabf3429ee211212fce265454aea)) --- CHANGELOG.md | 10 ++++++++++ infra/charts/feast-feature-server/Chart.yaml | 2 +- 
infra/charts/feast-feature-server/README.md | 2 +- infra/charts/feast-python-server/Chart.yaml | 2 +- infra/charts/feast-python-server/README.md | 2 +- infra/charts/feast/Chart.yaml | 2 +- infra/charts/feast/README.md | 6 +++--- infra/charts/feast/charts/feature-server/Chart.yaml | 4 ++-- infra/charts/feast/charts/feature-server/README.md | 4 ++-- infra/charts/feast/charts/feature-server/values.yaml | 2 +- .../feast/charts/transformation-service/Chart.yaml | 4 ++-- .../feast/charts/transformation-service/README.md | 4 ++-- .../feast/charts/transformation-service/values.yaml | 2 +- infra/charts/feast/requirements.yaml | 4 ++-- java/pom.xml | 2 +- ui/package.json | 2 +- 16 files changed, 32 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d4ea5f20d6..7e893bdeb7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.23.2](https://github.com/feast-dev/feast/compare/v0.23.1...v0.23.2) (2022-08-15) + + +### Bug Fixes + +* Fix field mapping logic during feature inference ([#3067](https://github.com/feast-dev/feast/issues/3067)) ([eb885b1](https://github.com/feast-dev/feast/commit/eb885b176cf0d211987cab852e49ba4af93185b3)) +* Fix incorrect on demand feature view diffing and improve Java tests ([#3074](https://github.com/feast-dev/feast/issues/3074)) ([0ff0ec4](https://github.com/feast-dev/feast/commit/0ff0ec473a03e54fdf950b4476b9312b27629230)) +* Fix on demand feature view output in feast plan + Web UI crash ([#3057](https://github.com/feast-dev/feast/issues/3057)) ([a32d247](https://github.com/feast-dev/feast/commit/a32d2475b1d47f24f2698a15ff596407a2035f17)) +* Fix Spark offline store type conversion to arrow ([#3071](https://github.com/feast-dev/feast/issues/3071)) ([a49f70c](https://github.com/feast-dev/feast/commit/a49f70c1528acabf3429ee211212fce265454aea)) + ## [0.23.1](https://github.com/feast-dev/feast/compare/v0.23.0...v0.23.1) (2022-08-10) diff --git a/infra/charts/feast-feature-server/Chart.yaml 
b/infra/charts/feast-feature-server/Chart.yaml index aca9574b0c..936bc87bb4 100644 --- a/infra/charts/feast-feature-server/Chart.yaml +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-feature-server description: Feast Feature Server in Go or Python type: application -version: 0.23.1 +version: 0.23.2 keywords: - machine learning - big data diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md index e83f75b39d..d36f0c6f26 100644 --- a/infra/charts/feast-feature-server/README.md +++ b/infra/charts/feast-feature-server/README.md @@ -1,6 +1,6 @@ # feast-feature-server -![Version: 0.23.1](https://img.shields.io/badge/Version-0.23.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) +![Version: 0.23.2](https://img.shields.io/badge/Version-0.23.2-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) Feast Feature Server in Go or Python diff --git a/infra/charts/feast-python-server/Chart.yaml b/infra/charts/feast-python-server/Chart.yaml index b8b229fd59..d319420c48 100644 --- a/infra/charts/feast-python-server/Chart.yaml +++ b/infra/charts/feast-python-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-python-server description: Feast Feature Server in Python type: application -version: 0.23.1 +version: 0.23.2 keywords: - machine learning - big data diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md index 5f25b7d8fe..2e30e08257 100644 --- a/infra/charts/feast-python-server/README.md +++ b/infra/charts/feast-python-server/README.md @@ -1,6 +1,6 @@ # feast-python-server -![Version: 0.23.1](https://img.shields.io/badge/Version-0.23.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) 
+![Version: 0.23.2](https://img.shields.io/badge/Version-0.23.2-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) Feast Feature Server in Python diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index e7bc00d8a6..eb4f7ff11b 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.23.1 +version: 0.23.2 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index b7741c4b45..86f879fcda 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast components that are being installed on ## Chart: Feast -Feature store for machine learning Current chart version is `0.23.1` +Feature store for machine learning Current chart version is `0.23.2` ## Installation @@ -55,8 +55,8 @@ For more details, please see: https://docs.feast.dev/how-to-guides/running-feast | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.23.1 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.23.1 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.23.2 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.23.2 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index 42b366e0e7..f7a07e16fd 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature 
Server: Online feature serving service for Feast" name: feature-server -version: 0.23.1 -appVersion: v0.23.1 +version: 0.23.2 +appVersion: v0.23.2 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 84ed8abd25..9bf732c661 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.23.1](https://img.shields.io/badge/Version-0.23.1-informational?style=flat-square) ![AppVersion: v0.23.1](https://img.shields.io/badge/AppVersion-v0.23.1-informational?style=flat-square) +![Version: 0.23.2](https://img.shields.io/badge/Version-0.23.2-informational?style=flat-square) ![AppVersion: v0.23.2](https://img.shields.io/badge/AppVersion-v0.23.2-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.23.1"` | Image tag | +| image.tag | string | `"0.23.2"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index f23c77dd1a..761204f048 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: 
feastdev/feature-server-java # image.tag -- Image tag - tag: 0.23.1 + tag: 0.23.2 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 834ce7fe56..7028232f65 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.23.1 -appVersion: v0.23.1 +version: 0.23.2 +appVersion: v0.23.2 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 84525fc0d6..462eaf4c81 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.23.1](https://img.shields.io/badge/Version-0.23.1-informational?style=flat-square) ![AppVersion: v0.23.1](https://img.shields.io/badge/AppVersion-v0.23.1-informational?style=flat-square) +![Version: 0.23.2](https://img.shields.io/badge/Version-0.23.2-informational?style=flat-square) ![AppVersion: v0.23.2](https://img.shields.io/badge/AppVersion-v0.23.2-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.23.1"` | Image tag | +| image.tag | string | `"0.23.2"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | 
podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 53841df813..63f3fa9b1d 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.23.1 + tag: 0.23.2 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 8d24fcd9e6..a92531f6b8 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.23.1 + version: 0.23.2 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.23.1 + version: 0.23.2 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/java/pom.xml b/java/pom.xml index cac85cdd38..bbfbb3ca14 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -35,7 +35,7 @@ - 0.23.1 + 0.23.2 https://github.com/feast-dev/feast UTF-8 diff --git a/ui/package.json b/ui/package.json index 21bdc65674..cdd8238f48 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.23.1", + "version": "0.23.2", "private": false, "files": [ "dist"