From 8688acd1731aa04b041090c7b1c049bfba1717ed Mon Sep 17 00:00:00 2001 From: Rob Howley Date: Mon, 15 Jan 2024 14:59:14 -0500 Subject: [PATCH 001/122] feat: Add python bytes to array type conversion support proto (#3874) * feat: add redis sentinel support format lint Signed-off-by: snowron Signed-off-by: Rob Howley * chore: Bump pyarrow Bumps [pyarrow](https://github.com/apache/arrow) from 6.0.0 to 14.0.1. - [Commits](https://github.com/apache/arrow/compare/go/arrow/v6.0.0...apache-arrow-14.0.1) --- updated-dependencies: - dependency-name: pyarrow dependency-type: direct:production ... Signed-off-by: dependabot[bot] Signed-off-by: Rob Howley * fix: upgrade the pyarrow to latest v14.0.1 for CVE-2023-47248. Signed-off-by: Shuchu Han Signed-off-by: Rob Howley * feat: add bytes to array type conversion in python -> proto Signed-off-by: Rob Howley * ignore type like in other proto val assignments Signed-off-by: Rob Howley * run black Signed-off-by: Rob Howley * floats can also appear as ints Signed-off-by: Rob Howley --------- Signed-off-by: snowron Signed-off-by: Rob Howley Signed-off-by: dependabot[bot] Signed-off-by: Shuchu Han Co-authored-by: snowron Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shuchu Han Co-authored-by: Rob Howley --- sdk/python/feast/type_map.py | 17 +++++++++++++- sdk/python/tests/unit/test_type_map.py | 32 ++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index cdb65f886e..9dbbb5a64c 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json from collections import defaultdict from datetime import datetime, timezone from typing import ( @@ -297,7 +298,7 @@ def _type_err(item, dtype): None, ), ValueType.FLOAT: ("float_val", lambda x: float(x), None), - ValueType.DOUBLE: ("double_val", lambda x: x, {float, np.float64}), + ValueType.DOUBLE: ("double_val", lambda x: x, {float, np.float64, int, np.int_}), ValueType.STRING: ("string_val", lambda x: str(x), None), ValueType.BYTES: ("bytes_val", lambda x: x, {bytes}), ValueType.BOOL: ("bool_val", lambda x: x, {bool, np.bool_, int, np.int_}), @@ -353,6 +354,19 @@ def _python_value_to_proto_value( feast_value_type ] + # Bytes to array type conversion + if isinstance(sample, (bytes, bytearray)): + # Bytes of an array containing elements of bytes not supported + if feast_value_type == ValueType.BYTES_LIST: + raise _type_err(sample, ValueType.BYTES_LIST) + + json_value = json.loads(sample) + if isinstance(json_value, list): + if feast_value_type == ValueType.BOOL_LIST: + json_value = [bool(item) for item in json_value] + return [ProtoValue(**{field_name: proto_type(val=json_value)})] # type: ignore + raise _type_err(sample, valid_types[0]) + if sample is not None and not all( type(item) in valid_types for item in sample ): @@ -631,6 +645,7 @@ def redshift_to_feast_value_type(redshift_type_as_str: str) -> ValueType: "varchar": ValueType.STRING, "timestamp": ValueType.UNIX_TIMESTAMP, "timestamptz": ValueType.UNIX_TIMESTAMP, + "super": ValueType.BYTES, # skip date, geometry, hllsketch, time, timetz } diff --git a/sdk/python/tests/unit/test_type_map.py b/sdk/python/tests/unit/test_type_map.py index 78ff15fe93..9b21900e6d 100644 --- a/sdk/python/tests/unit/test_type_map.py +++ b/sdk/python/tests/unit/test_type_map.py @@ -48,3 +48,35 @@ def test_python_values_to_proto_values_bool(values): converted = feast_value_type_to_python_type(protos[0]) assert converted is bool(values[0]) + + +@pytest.mark.parametrize( + "values, value_type, expected", + ( + 
(np.array([b"[1,2,3]"]), ValueType.INT64_LIST, [1, 2, 3]), + (np.array([b"[1,2,3]"]), ValueType.INT32_LIST, [1, 2, 3]), + (np.array([b"[1.5,2.5,3.5]"]), ValueType.FLOAT_LIST, [1.5, 2.5, 3.5]), + (np.array([b"[1.5,2.5,3.5]"]), ValueType.DOUBLE_LIST, [1.5, 2.5, 3.5]), + (np.array([b'["a","b","c"]']), ValueType.STRING_LIST, ["a", "b", "c"]), + (np.array([b"[true,false]"]), ValueType.BOOL_LIST, [True, False]), + (np.array([b"[1,0]"]), ValueType.BOOL_LIST, [True, False]), + (np.array([None]), ValueType.STRING_LIST, None), + ([b"[1,2,3]"], ValueType.INT64_LIST, [1, 2, 3]), + ([b"[1,2,3]"], ValueType.INT32_LIST, [1, 2, 3]), + ([b"[1.5,2.5,3.5]"], ValueType.FLOAT_LIST, [1.5, 2.5, 3.5]), + ([b"[1.5,2.5,3.5]"], ValueType.DOUBLE_LIST, [1.5, 2.5, 3.5]), + ([b'["a","b","c"]'], ValueType.STRING_LIST, ["a", "b", "c"]), + ([b"[true,false]"], ValueType.BOOL_LIST, [True, False]), + ([b"[1,0]"], ValueType.BOOL_LIST, [True, False]), + ([None], ValueType.STRING_LIST, None), + ), +) +def test_python_values_to_proto_values_bytes_to_list(values, value_type, expected): + protos = python_values_to_proto_values(values, value_type) + converted = feast_value_type_to_python_type(protos[0]) + assert converted == expected + + +def test_python_values_to_proto_values_bytes_to_list_not_supported(): + with pytest.raises(TypeError): + _ = python_values_to_proto_values([b"[]"], ValueType.BYTES_LIST) From 99178f930917b001a09c825ee51f7d9c73b4d7ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 15:44:31 -0500 Subject: [PATCH 002/122] chore: Bump jupyter-server from 2.10.1 to 2.11.2 in /sdk/python/requirements (#3858) chore: Bump jupyter-server in /sdk/python/requirements Bumps [jupyter-server](https://github.com/jupyter-server/jupyter_server) from 2.10.1 to 2.11.2. 
- [Release notes](https://github.com/jupyter-server/jupyter_server/releases) - [Changelog](https://github.com/jupyter-server/jupyter_server/blob/main/CHANGELOG.md) - [Commits](https://github.com/jupyter-server/jupyter_server/compare/v2.10.1...v2.11.2) --- updated-dependencies: - dependency-name: jupyter-server dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 2cbfa6cbc9..5eb5cb294d 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -400,7 +400,7 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.10.1 +jupyter-server==2.11.2 # via # jupyter-lsp # jupyterlab diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 8449af824f..911ef15e39 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -416,7 +416,7 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.10.1 +jupyter-server==2.11.2 # via # jupyter-lsp # jupyterlab diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 5bc9210fd8..c7ed9f021e 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -407,7 +407,7 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.10.1 +jupyter-server==2.11.2 # via # 
jupyter-lsp # jupyterlab From f4a3cb1a2b64e7cbb488cf2fcd8c2d6ed536f089 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:38:29 -0500 Subject: [PATCH 003/122] chore: Bump jinja2 from 3.1.2 to 3.1.3 in /sdk/python/requirements (#3882) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.2 to 3.1.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.2...3.1.3) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 5eb5cb294d..362b38ea47 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -349,7 +349,7 @@ isort==5.12.0 # via feast (setup.py) jedi==0.19.1 # via ipython -jinja2==3.1.2 +jinja2==3.1.3 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 0508614aa6..18486d7fa9 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -89,7 +89,7 @@ importlib-metadata==6.8.0 # feast (setup.py) importlib-resources==6.1.1 # via feast (setup.py) -jinja2==3.1.2 +jinja2==3.1.3 # via feast 
(setup.py) jsonschema==4.20.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 911ef15e39..e219935b01 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -365,7 +365,7 @@ isort==5.12.0 # via feast (setup.py) jedi==0.19.1 # via ipython -jinja2==3.1.2 +jinja2==3.1.3 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 3f6a15a1ef..c180c50c81 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -92,7 +92,7 @@ importlib-resources==6.1.1 # feast (setup.py) # jsonschema # jsonschema-specifications -jinja2==3.1.2 +jinja2==3.1.3 # via feast (setup.py) jsonschema==4.20.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index c7ed9f021e..3acecd892c 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -356,7 +356,7 @@ isort==5.12.0 # via feast (setup.py) jedi==0.19.1 # via ipython -jinja2==3.1.2 +jinja2==3.1.3 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 120ecf9eb3..3b6f88b4e2 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -89,7 +89,7 @@ importlib-metadata==6.8.0 # feast (setup.py) importlib-resources==6.1.1 # via feast (setup.py) -jinja2==3.1.2 +jinja2==3.1.3 # via feast (setup.py) jsonschema==4.20.0 # via feast (setup.py) From ac6529c380bc623a15054350c9936b69617b4bee Mon Sep 17 00:00:00 2001 From: Willem Pienaar <6728866+woop@users.noreply.github.com> Date: Tue, 16 Jan 2024 13:57:03 -0500 Subject: [PATCH 004/122] Move maintainers to 
emeritus (#3888) --- community/maintainers.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/community/maintainers.md b/community/maintainers.md index e66dbeb762..8aca48fd0d 100644 --- a/community/maintainers.md +++ b/community/maintainers.md @@ -9,9 +9,6 @@ In alphabetical order | Name | GitHub Username | Email | Organization | | -------------- | ---------------- |-----------------------------| ------------------ | | Achal Shah | `achals` | achals@gmail.com | Tecton | -| Felix Wang | `felixwang9817` | wangfelix98@gmail.com | Tecton | -| Kevin Zhang | `kevjumba` | kevin.zhang.13499@gmail.com | Tecton | -| Miles Adkins | `sfc-gh-madkins` | miles.adkins@snowflake.com | Snowflake | | Willem Pienaar | `woop` | will.pienaar@gmail.com | Tecton | | Zhiling Chen | `zhilingc` | chnzhlng@gmail.com | GetGround | @@ -29,3 +26,6 @@ In alphabetical order | Danny Chiao | adchia | danny@tecton.ai | Tecton | | David Liu | mavysavydav | davidyliuliu@gmail.com | Twitter | | Matt Delacour | MattDelac | mdelacour@hey.com | Shopify | +| Miles Adkins | sfc-gh-madkins | miles.adkins@snowflake.com | Snowflake | +| Felix Wang | `felixwang9817` | wangfelix98@gmail.com | Tecton | +| Kevin Zhang | `kevjumba` | kevin.zhang.13499@gmail.com | Tecton | From 4c9062449e3faaa9edb527d0d2be4ede4677db48 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 22:16:36 -0500 Subject: [PATCH 005/122] chore: Bump jupyter-lsp from 2.2.0 to 2.2.2 in /sdk/python/requirements (#3892) Bumps [jupyter-lsp](https://github.com/jupyter-lsp/jupyterlab-lsp) from 2.2.0 to 2.2.2. - [Release notes](https://github.com/jupyter-lsp/jupyterlab-lsp/releases) - [Changelog](https://github.com/jupyter-lsp/jupyterlab-lsp/blob/main/CHANGELOG.md) - [Commits](https://github.com/jupyter-lsp/jupyterlab-lsp/commits) --- updated-dependencies: - dependency-name: jupyter-lsp dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 362b38ea47..ddc4d15ac4 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -398,7 +398,7 @@ jupyter-core==5.5.0 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.0 +jupyter-lsp==2.2.2 # via jupyterlab jupyter-server==2.11.2 # via diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index e219935b01..d8e948ab7c 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -414,7 +414,7 @@ jupyter-core==5.5.0 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.0 +jupyter-lsp==2.2.2 # via jupyterlab jupyter-server==2.11.2 # via diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 3acecd892c..fd79e7cf4a 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -405,7 +405,7 @@ jupyter-core==5.5.0 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.0 +jupyter-lsp==2.2.2 # via jupyterlab jupyter-server==2.11.2 # via From 8f65fe10fc5f09ae9c220aa0057e5514033ee72d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 22:48:50 -0500 Subject: [PATCH 006/122] chore: Bump cryptography from 41.0.5 to 41.0.6 in /sdk/python/requirements (#3845) chore: Bump cryptography in 
/sdk/python/requirements Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.5 to 41.0.6. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.5...41.0.6) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index ddc4d15ac4..5e407c1a99 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -124,7 +124,7 @@ coverage[toml]==7.3.2 # via # coverage # pytest-cov -cryptography==41.0.5 +cryptography==41.0.6 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index d8e948ab7c..02eaf6dc30 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -130,7 +130,7 @@ coverage[toml]==7.3.2 # via # coverage # pytest-cov -cryptography==41.0.5 +cryptography==41.0.6 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index fd79e7cf4a..43c49a4952 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -124,7 +124,7 @@ coverage[toml]==7.3.2 # via # coverage # pytest-cov -cryptography==41.0.5 +cryptography==41.0.6 # via # azure-identity # azure-storage-blob From 
bdd7dfb6128dfc1f314a61a266da91c611ce7892 Mon Sep 17 00:00:00 2001 From: Jiwon Park Date: Fri, 19 Jan 2024 11:17:41 +0700 Subject: [PATCH 007/122] fix: Allow trancated timestamps when converting (#3861) --- sdk/python/feast/infra/offline_stores/bigquery.py | 10 ++++++++-- sdk/python/feast/infra/utils/aws_utils.py | 7 ++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index b7910c391c..0ee82a908e 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -356,7 +356,10 @@ def write_logged_features( # In Pyarrow v13.0, the parquet version was upgraded to v2.6 from v2.4. # Set the coerce_timestamps to "us"(microseconds) for backward compatibility. pyarrow.parquet.write_table( - table=data, where=parquet_temp_file, coerce_timestamps="us" + table=data, + where=parquet_temp_file, + coerce_timestamps="us", + allow_truncated_timestamps=True, ) parquet_temp_file.seek(0) @@ -407,7 +410,10 @@ def offline_write_batch( # In Pyarrow v13.0, the parquet version was upgraded to v2.6 from v2.4. # Set the coerce_timestamps to "us"(microseconds) for backward compatibility. pyarrow.parquet.write_table( - table=table, where=parquet_temp_file, coerce_timestamps="us" + table=table, + where=parquet_temp_file, + coerce_timestamps="us", + allow_truncated_timestamps=True, ) parquet_temp_file.seek(0) diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index 728bcab791..ef83c6d1c6 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -353,7 +353,12 @@ def upload_arrow_table_to_redshift( with tempfile.TemporaryFile(suffix=".parquet") as parquet_temp_file: # In Pyarrow v13.0, the parquet version was upgraded to v2.6 from v2.4. # Set the coerce_timestamps to "us"(microseconds) for backward compatibility. 
- pq.write_table(table, parquet_temp_file, coerce_timestamps="us") + pq.write_table( + table, + parquet_temp_file, + coerce_timestamps="us", + allow_truncated_timestamps=True, + ) parquet_temp_file.seek(0) s3_resource.Object(bucket, key).put(Body=parquet_temp_file) From 0a06a2bf54c4913305fba3a598b4cc7d47c87c92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 20:30:34 -0500 Subject: [PATCH 008/122] chore: Bump zod from 3.15.1 to 3.22.3 in /sdk/python/feast/ui (#3816) Bumps [zod](https://github.com/colinhacks/zod) from 3.15.1 to 3.22.3. - [Release notes](https://github.com/colinhacks/zod/releases) - [Changelog](https://github.com/colinhacks/zod/blob/master/CHANGELOG.md) - [Commits](https://github.com/colinhacks/zod/compare/v3.15.1...v3.22.3) --- updated-dependencies: - dependency-name: zod dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/package.json | 2 +- sdk/python/feast/ui/yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json index 1d2951cf57..f142b0b644 100644 --- a/sdk/python/feast/ui/package.json +++ b/sdk/python/feast/ui/package.json @@ -24,7 +24,7 @@ "typescript": "^4.6.4", "use-query-params": "^1.2.3", "web-vitals": "^2.1.4", - "zod": "^3.15.1" + "zod": "^3.22.3" }, "scripts": { "start": "react-scripts start", diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 4d6f690b94..06f4d3f12b 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -11022,10 +11022,10 @@ yocto-queue@^0.1.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== 
-zod@^3.11.6, zod@^3.15.1: - version "3.15.1" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.15.1.tgz#9e404cd8002ccffb03baa94cff2e1638ed49d82f" - integrity sha512-WAdjcoOxa4S9oc/u7fTbC3CC7uVqptLLU0LKqS8RDBOrCXp2t5avM8BUfgNVZJymGWAx6SEUYxWPPoYuQ5rgwQ== +zod@^3.11.6, zod@^3.22.3: + version "3.22.3" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.3.tgz#2fbc96118b174290d94e8896371c95629e87a060" + integrity sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug== zwitch@^1.0.0: version "1.0.5" From b80bcd611eea4a46226cfee0d541d907c06a47d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 20:55:51 -0500 Subject: [PATCH 009/122] chore: Bump google.golang.org/grpc from 1.53.0 to 1.56.3 (#3820) Bumps [google.golang.org/grpc](https://github.com/grpc/grpc-go) from 1.53.0 to 1.56.3. - [Release notes](https://github.com/grpc/grpc-go/releases) - [Commits](https://github.com/grpc/grpc-go/compare/v1.53.0...v1.56.3) --- updated-dependencies: - dependency-name: google.golang.org/grpc dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 8 +- go.sum | 370 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 369 insertions(+), 9 deletions(-) diff --git a/go.mod b/go.mod index 20c52d3221..6def933985 100644 --- a/go.mod +++ b/go.mod @@ -8,14 +8,14 @@ require ( github.com/apache/arrow/go/v8 v8.0.0 github.com/ghodss/yaml v1.0.0 github.com/go-redis/redis/v8 v8.11.4 - github.com/golang/protobuf v1.5.2 + github.com/golang/protobuf v1.5.3 github.com/google/uuid v1.3.0 github.com/mattn/go-sqlite3 v1.14.12 github.com/pkg/errors v0.9.1 github.com/spaolacci/murmur3 v1.1.0 github.com/stretchr/testify v1.7.0 - google.golang.org/grpc v1.53.0 - google.golang.org/protobuf v1.28.1 + google.golang.org/grpc v1.56.3 + google.golang.org/protobuf v1.30.0 ) require ( @@ -43,7 +43,7 @@ require ( golang.org/x/text v0.13.0 // indirect golang.org/x/tools v0.6.0 // indirect golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect - google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect diff --git a/go.sum b/go.sum index 990ff9b1ba..9a33f2c9ee 100644 --- a/go.sum +++ b/go.sum @@ -35,47 +35,83 @@ cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34h cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA= cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM= cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= +cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4= 
cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw= +cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= cloud.google.com/go/accesscontextmanager v1.3.0/go.mod h1:TgCBehyr5gNMz7ZaH9xubp+CE8dkrszb4oK9CWyvD4o= cloud.google.com/go/accesscontextmanager v1.4.0/go.mod h1:/Kjh7BBu/Gh83sv+K60vN9QE5NJcd80sU33vIe2IFPE= +cloud.google.com/go/accesscontextmanager v1.6.0/go.mod h1:8XCvZWfYw3K/ji0iVnp+6pu7huxoQTLmxAbVjbloTtM= +cloud.google.com/go/accesscontextmanager v1.7.0/go.mod h1:CEGLewx8dwa33aDAZQujl7Dx+uYhS0eay198wB/VumQ= cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw= cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY= cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9jQmorivIiWcKg= +cloud.google.com/go/aiplatform v1.35.0/go.mod h1:7MFT/vCaOyZT/4IIFfxH4ErVg/4ku6lKv3w0+tFTgXQ= +cloud.google.com/go/aiplatform v1.36.1/go.mod h1:WTm12vJRPARNvJ+v6P52RDHCNe4AhvjcIZ/9/RRHy/k= +cloud.google.com/go/aiplatform v1.37.0/go.mod h1:IU2Cv29Lv9oCn/9LkFiiuKfwrRTq+QQMbW+hPCxJGZw= cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI= cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4= +cloud.google.com/go/analytics v0.17.0/go.mod h1:WXFa3WSym4IZ+JiKmavYdJwGG/CvpqiqczmL59bTD9M= +cloud.google.com/go/analytics v0.18.0/go.mod h1:ZkeHGQlcIPkw0R/GW+boWHhCOR43xz9RN/jn7WcqfIE= +cloud.google.com/go/analytics v0.19.0/go.mod h1:k8liqf5/HCnOUkbawNtrWWc+UAzyDlW89doe8TtoDsE= cloud.google.com/go/apigateway v1.3.0/go.mod h1:89Z8Bhpmxu6AmUxuVRg/ECRGReEdiP3vQtk4Z1J9rJk= cloud.google.com/go/apigateway v1.4.0/go.mod h1:pHVY9MKGaH9PQ3pJ4YLzoj6U5FUDeDFBllIz7WmzJoc= +cloud.google.com/go/apigateway v1.5.0/go.mod h1:GpnZR3Q4rR7LVu5951qfXPJCHquZt02jf7xQx7kpqN8= cloud.google.com/go/apigeeconnect v1.3.0/go.mod 
h1:G/AwXFAKo0gIXkPTVfZDd2qA1TxBXJ3MgMRBQkIi9jc= cloud.google.com/go/apigeeconnect v1.4.0/go.mod h1:kV4NwOKqjvt2JYR0AoIWo2QGfoRtn/pkS3QlHp0Ni04= +cloud.google.com/go/apigeeconnect v1.5.0/go.mod h1:KFaCqvBRU6idyhSNyn3vlHXc8VMDJdRmwDF6JyFRqZ8= +cloud.google.com/go/apigeeregistry v0.4.0/go.mod h1:EUG4PGcsZvxOXAdyEghIdXwAEi/4MEaoqLMLDMIwKXY= +cloud.google.com/go/apigeeregistry v0.5.0/go.mod h1:YR5+s0BVNZfVOUkMa5pAR2xGd0A473vA5M7j247o1wM= +cloud.google.com/go/apigeeregistry v0.6.0/go.mod h1:BFNzW7yQVLZ3yj0TKcwzb8n25CFBri51GVGOEUcgQsc= +cloud.google.com/go/apikeys v0.4.0/go.mod h1:XATS/yqZbaBK0HOssf+ALHp8jAlNHUgyfprvNcBIszU= +cloud.google.com/go/apikeys v0.5.0/go.mod h1:5aQfwY4D+ewMMWScd3hm2en3hCj+BROlyrt3ytS7KLI= +cloud.google.com/go/apikeys v0.6.0/go.mod h1:kbpXu5upyiAlGkKrJgQl8A0rKNNJ7dQ377pdroRSSi8= cloud.google.com/go/appengine v1.4.0/go.mod h1:CS2NhuBuDXM9f+qscZ6V86m1MIIqPj3WC/UoEuR1Sno= cloud.google.com/go/appengine v1.5.0/go.mod h1:TfasSozdkFI0zeoxW3PTBLiNqRmzraodCWatWI9Dmak= +cloud.google.com/go/appengine v1.6.0/go.mod h1:hg6i0J/BD2cKmDJbaFSYHFyZkgBEfQrDg/X0V5fJn84= +cloud.google.com/go/appengine v1.7.0/go.mod h1:eZqpbHFCqRGa2aCdope7eC0SWLV1j0neb/QnMJVWx6A= +cloud.google.com/go/appengine v1.7.1/go.mod h1:IHLToyb/3fKutRysUlFO0BPt5j7RiQ45nrzEJmKTo6E= cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4= cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0= +cloud.google.com/go/area120 v0.7.0/go.mod h1:a3+8EUD1SX5RUcCs3MY5YasiO1z6yLiNLRiFrykbynY= +cloud.google.com/go/area120 v0.7.1/go.mod h1:j84i4E1RboTWjKtZVWXPqvK5VHQFJRF2c1Nm69pWm9k= cloud.google.com/go/artifactregistry v1.6.0/go.mod h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ= cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk= cloud.google.com/go/artifactregistry v1.8.0/go.mod h1:w3GQXkJX8hiKN0v+at4b0qotwijQbYUqF2GWkZzAhC0= cloud.google.com/go/artifactregistry v1.9.0/go.mod 
h1:2K2RqvA2CYvAeARHRkLDhMDJ3OXy26h3XW+3/Jh2uYc= +cloud.google.com/go/artifactregistry v1.11.1/go.mod h1:lLYghw+Itq9SONbCa1YWBoWs1nOucMH0pwXN1rOBZFI= +cloud.google.com/go/artifactregistry v1.11.2/go.mod h1:nLZns771ZGAwVLzTX/7Al6R9ehma4WUEhZGWV6CeQNQ= +cloud.google.com/go/artifactregistry v1.12.0/go.mod h1:o6P3MIvtzTOnmvGagO9v/rOjjA0HmhJ+/6KAXrmYDCI= +cloud.google.com/go/artifactregistry v1.13.0/go.mod h1:uy/LNfoOIivepGhooAUpL1i30Hgee3Cu0l4VTWHUC08= cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o= cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s= cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0= cloud.google.com/go/asset v1.9.0/go.mod h1:83MOE6jEJBMqFKadM9NLRcs80Gdw76qGuHn8m3h8oHQ= cloud.google.com/go/asset v1.10.0/go.mod h1:pLz7uokL80qKhzKr4xXGvBQXnzHn5evJAEAtZiIb0wY= +cloud.google.com/go/asset v1.11.1/go.mod h1:fSwLhbRvC9p9CXQHJ3BgFeQNM4c9x10lqlrdEUYXlJo= +cloud.google.com/go/asset v1.12.0/go.mod h1:h9/sFOa4eDIyKmH6QMpm4eUK3pDojWnUhTgJlk762Hg= +cloud.google.com/go/asset v1.13.0/go.mod h1:WQAMyYek/b7NBpYq/K4KJWcRqzoalEsxz/t/dTk4THw= cloud.google.com/go/assuredworkloads v1.5.0/go.mod h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY= cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw= cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI= cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= +cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8= cloud.google.com/go/automl v1.7.0/go.mod 
h1:RL9MYCCsJEOmt0Wf3z9uzG0a7adTT1fe+aObgSpkCt8= cloud.google.com/go/automl v1.8.0/go.mod h1:xWx7G/aPEe/NP+qzYXktoBSDfjO+vnKMGgsApGJJquM= +cloud.google.com/go/automl v1.12.0/go.mod h1:tWDcHDp86aMIuHmyvjuKeeHEGq76lD7ZqfGLN6B0NuU= cloud.google.com/go/baremetalsolution v0.3.0/go.mod h1:XOrocE+pvK1xFfleEnShBlNAXf+j5blPPxrhjKgnIFc= cloud.google.com/go/baremetalsolution v0.4.0/go.mod h1:BymplhAadOO/eBa7KewQ0Ppg4A4Wplbn+PsFKRLo0uI= +cloud.google.com/go/baremetalsolution v0.5.0/go.mod h1:dXGxEkmR9BMwxhzBhV0AioD0ULBmuLZI8CdwalUxuss= cloud.google.com/go/batch v0.3.0/go.mod h1:TR18ZoAekj1GuirsUsR1ZTKN3FC/4UDnScjT8NXImFE= cloud.google.com/go/batch v0.4.0/go.mod h1:WZkHnP43R/QCGQsZ+0JyG4i79ranE2u8xvjq/9+STPE= +cloud.google.com/go/batch v0.7.0/go.mod h1:vLZN95s6teRUqRQ4s3RLDsH8PvboqBK+rn1oevL159g= cloud.google.com/go/beyondcorp v0.2.0/go.mod h1:TB7Bd+EEtcw9PCPQhCJtJGjk/7TC6ckmnSFS+xwTfm4= cloud.google.com/go/beyondcorp v0.3.0/go.mod h1:E5U5lcrcXMsCuoDNyGrpyTm/hn7ne941Jz2vmksAxW8= +cloud.google.com/go/beyondcorp v0.4.0/go.mod h1:3ApA0mbhHx6YImmuubf5pyW8srKnCEPON32/5hj+RmM= +cloud.google.com/go/beyondcorp v0.5.0/go.mod h1:uFqj9X+dSfrheVp7ssLTaRHd2EHqSL4QZmH4e8WXGGU= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -85,26 +121,42 @@ cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM7 cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA= cloud.google.com/go/bigquery v1.43.0/go.mod h1:ZMQcXHsl+xmU1z36G2jNGZmKp9zNY5BUua5wDgmNCfw= cloud.google.com/go/bigquery v1.44.0/go.mod h1:0Y33VqXTEsbamHJvJHdFmtqHvMIY28aK1+dFsvaChGc= +cloud.google.com/go/bigquery v1.47.0/go.mod h1:sA9XOgy0A8vQK9+MWhEQTY6Tix87M/ZurWFIxmF9I/E= +cloud.google.com/go/bigquery v1.48.0/go.mod 
h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm0Nf1fysJac= +cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9yBh7Oy7/4Q= +cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI= cloud.google.com/go/billing v1.7.0/go.mod h1:q457N3Hbj9lYwwRbnlD7vUpyjq6u5U1RAOArInEiD5Y= +cloud.google.com/go/billing v1.12.0/go.mod h1:yKrZio/eu+okO/2McZEbch17O5CB5NpZhhXG6Z766ss= +cloud.google.com/go/billing v1.13.0/go.mod h1:7kB2W9Xf98hP9Sr12KfECgfGclsH3CQR0R08tnRlRbc= cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM= cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI= cloud.google.com/go/binaryauthorization v1.3.0/go.mod h1:lRZbKgjDIIQvzYQS1p99A7/U1JqvqeZg0wiI5tp6tg0= cloud.google.com/go/binaryauthorization v1.4.0/go.mod h1:tsSPQrBd77VLplV70GUhBf/Zm3FsKmgSqgm4UmiDItk= +cloud.google.com/go/binaryauthorization v1.5.0/go.mod h1:OSe4OU1nN/VswXKRBmciKpo9LulY41gch5c68htf3/Q= cloud.google.com/go/certificatemanager v1.3.0/go.mod h1:n6twGDvcUBFu9uBgt4eYvvf3sQ6My8jADcOVwHmzadg= cloud.google.com/go/certificatemanager v1.4.0/go.mod h1:vowpercVFyqs8ABSmrdV+GiFf2H/ch3KyudYQEMM590= +cloud.google.com/go/certificatemanager v1.6.0/go.mod h1:3Hh64rCKjRAX8dXgRAyOcY5vQ/fE1sh8o+Mdd6KPgY8= cloud.google.com/go/channel v1.8.0/go.mod h1:W5SwCXDJsq/rg3tn3oG0LOxpAo6IMxNa09ngphpSlnk= cloud.google.com/go/channel v1.9.0/go.mod h1:jcu05W0my9Vx4mt3/rEHpfxc9eKi9XwsdDL8yBMbKUk= +cloud.google.com/go/channel v1.11.0/go.mod h1:IdtI0uWGqhEeatSB62VOoJ8FSUhJ9/+iGkJVqp74CGE= +cloud.google.com/go/channel v1.12.0/go.mod h1:VkxCGKASi4Cq7TbXxlaBezonAYpp1GCnKMY6tnMQnLU= cloud.google.com/go/cloudbuild 
v1.3.0/go.mod h1:WequR4ULxlqvMsjDEEEFnOG5ZSRSgWOywXYDb1vPE6U= cloud.google.com/go/cloudbuild v1.4.0/go.mod h1:5Qwa40LHiOXmz3386FrjrYM93rM/hdRr7b53sySrTqA= +cloud.google.com/go/cloudbuild v1.6.0/go.mod h1:UIbc/w9QCbH12xX+ezUsgblrWv+Cv4Tw83GiSMHOn9M= +cloud.google.com/go/cloudbuild v1.7.0/go.mod h1:zb5tWh2XI6lR9zQmsm1VRA+7OCuve5d8S+zJUul8KTg= +cloud.google.com/go/cloudbuild v1.9.0/go.mod h1:qK1d7s4QlO0VwfYn5YuClDGg2hfmLZEb4wQGAbIgL1s= cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM= cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk= +cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA= cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY= cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI= cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4= cloud.google.com/go/cloudtasks v1.8.0/go.mod h1:gQXUIwCSOI4yPVK7DgTVFiiP0ZW/eQkydWzwVMdHxrI= +cloud.google.com/go/cloudtasks v1.9.0/go.mod h1:w+EyLsVkLWHcOaqNEyvcKAsWp9p29dL6uL9Nst1cI7Y= +cloud.google.com/go/cloudtasks v1.10.0/go.mod h1:NDSoTLkZ3+vExFEWu2UJV1arUyzVDAiZtdWcsUyNwBs= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= @@ -117,224 +169,353 @@ cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE= cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= cloud.google.com/go/compute v1.15.1/go.mod h1:bjjoF/NtFUrkD/urWfdHaKuOPDR5nWIs63rR+SXhcpA= +cloud.google.com/go/compute v1.18.0/go.mod 
h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= +cloud.google.com/go/compute v1.19.0/go.mod h1:rikpw2y+UMidAe9tISo04EHNOIf42RLYF/q8Bs93scU= +cloud.google.com/go/compute v1.19.1/go.mod h1:6ylj3a05WF8leseCdIf77NK0g1ey+nj5IKd5/kvShxE= cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= +cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= cloud.google.com/go/container v1.6.0/go.mod h1:Xazp7GjJSeUYo688S+6J5V+n/t+G5sKBTFkKNudGRxg= cloud.google.com/go/container v1.7.0/go.mod h1:Dp5AHtmothHGX3DwwIHPgq45Y8KmNsgN3amoYfxVkLo= +cloud.google.com/go/container v1.13.1/go.mod h1:6wgbMPeQRw9rSnKBCAJXnds3Pzj03C4JHamr8asWKy4= +cloud.google.com/go/container v1.14.0/go.mod h1:3AoJMPhHfLDxLvrlVWaK57IXzaPnLaZq63WX59aQBfM= +cloud.google.com/go/container v1.15.0/go.mod h1:ft+9S0WGjAyjDggg5S06DXj+fHJICWg8L7isCQe9pQA= cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= +cloud.google.com/go/containeranalysis v0.7.0/go.mod h1:9aUL+/vZ55P2CXfuZjS4UjQ9AgXoSw8Ts6lemfmxBxI= +cloud.google.com/go/containeranalysis v0.9.0/go.mod h1:orbOANbwk5Ejoom+s+DUCTTJ7IBdBQJDcSylAx/on9s= cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs= 
cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc= cloud.google.com/go/datacatalog v1.7.0/go.mod h1:9mEl4AuDYWw81UGc41HonIHH7/sn52H0/tc8f8ZbZIE= cloud.google.com/go/datacatalog v1.8.0/go.mod h1:KYuoVOv9BM8EYz/4eMFxrr4DUKhGIOXxZoKYF5wdISM= +cloud.google.com/go/datacatalog v1.8.1/go.mod h1:RJ58z4rMp3gvETA465Vg+ag8BGgBdnRPEMMSTr5Uv+M= +cloud.google.com/go/datacatalog v1.12.0/go.mod h1:CWae8rFkfp6LzLumKOnmVh4+Zle4A3NXLzVJ1d1mRm0= +cloud.google.com/go/datacatalog v1.13.0/go.mod h1:E4Rj9a5ZtAxcQJlEBTLgMTphfP11/lNaAshpoBgemX8= cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= +cloud.google.com/go/dataflow v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo= cloud.google.com/go/dataform v0.4.0/go.mod h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE= cloud.google.com/go/dataform v0.5.0/go.mod h1:GFUYRe8IBa2hcomWplodVmUx/iTL0FrsauObOM3Ipr0= +cloud.google.com/go/dataform v0.6.0/go.mod h1:QPflImQy33e29VuapFdf19oPbE4aYTJxr31OAPV+ulA= +cloud.google.com/go/dataform v0.7.0/go.mod h1:7NulqnVozfHvWUBpMDfKMUESr+85aJsC/2O0o3jWPDE= cloud.google.com/go/datafusion v1.4.0/go.mod h1:1Zb6VN+W6ALo85cXnM1IKiPw+yQMKMhB9TsTSRDo/38= cloud.google.com/go/datafusion v1.5.0/go.mod h1:Kz+l1FGHB0J+4XF2fud96WMmRiq/wj8N9u007vyXZ2w= +cloud.google.com/go/datafusion v1.6.0/go.mod h1:WBsMF8F1RhSXvVM8rCV3AeyWVxcC2xY6vith3iw3S+8= cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I= cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ= +cloud.google.com/go/datalabeling v0.7.0/go.mod h1:WPQb1y08RJbmpM3ww0CSUAGweL0SxByuW2E+FU+wXcM= cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX8gdmFcZzA= cloud.google.com/go/dataplex v1.4.0/go.mod 
h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A= +cloud.google.com/go/dataplex v1.5.2/go.mod h1:cVMgQHsmfRoI5KFYq4JtIBEUbYwc3c7tXmIDhRmNNVQ= +cloud.google.com/go/dataplex v1.6.0/go.mod h1:bMsomC/aEJOSpHXdFKFGQ1b0TDPIeL28nJObeO1ppRs= cloud.google.com/go/dataproc v1.7.0/go.mod h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s= cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI= +cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4= cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo= cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA= +cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZWwP12Qq3c= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/datastore v1.10.0/go.mod h1:PC5UzAmDEkAmkfaknstTYbNpgE49HAgW2J1gcgUfmdM= +cloud.google.com/go/datastore v1.11.0/go.mod h1:TvGxBIHCS50u8jzG+AW/ppf87v1of8nwzFNgEZU1D3c= cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo= cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ= cloud.google.com/go/datastream v1.4.0/go.mod h1:h9dpzScPhDTs5noEMQVWP8Wx8AFBRyS0s8KWPx/9r0g= cloud.google.com/go/datastream v1.5.0/go.mod h1:6TZMMNPwjUqZHBKPQ1wwXpb0d5VDVPl2/XoS5yi88q4= +cloud.google.com/go/datastream v1.6.0/go.mod h1:6LQSuswqLa7S4rPAOZFVjHIG3wJIjZcZrw8JDEDJuIs= +cloud.google.com/go/datastream v1.7.0/go.mod h1:uxVRMm2elUSPuh65IbZpzJNMbuzkcvu5CjMqVIUHrww= cloud.google.com/go/deploy v1.4.0/go.mod h1:5Xghikd4VrmMLNaF6FiRFDlHb59VM59YoDQnOUdsH/c= cloud.google.com/go/deploy v1.5.0/go.mod h1:ffgdD0B89tToyW/U/D2eL0jN2+IEV/3EMuXHA0l4r+s= +cloud.google.com/go/deploy v1.6.0/go.mod h1:f9PTHehG/DjCom3QH0cntOVRm93uGBDt2vKzAPwpXQI= 
+cloud.google.com/go/deploy v1.8.0/go.mod h1:z3myEJnA/2wnB4sgjqdMfgxCA0EqC3RBTNcVPs93mtQ= cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4= cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0= cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8= cloud.google.com/go/dialogflow v1.18.0/go.mod h1:trO7Zu5YdyEuR+BhSNOqJezyFQ3aUzz0njv7sMx/iek= cloud.google.com/go/dialogflow v1.19.0/go.mod h1:JVmlG1TwykZDtxtTXujec4tQ+D8SBFMoosgy+6Gn0s0= +cloud.google.com/go/dialogflow v1.29.0/go.mod h1:b+2bzMe+k1s9V+F2jbJwpHPzrnIyHihAdRFMtn2WXuM= +cloud.google.com/go/dialogflow v1.31.0/go.mod h1:cuoUccuL1Z+HADhyIA7dci3N5zUssgpBJmCzI6fNRB4= +cloud.google.com/go/dialogflow v1.32.0/go.mod h1:jG9TRJl8CKrDhMEcvfcfFkkpp8ZhgPz3sBGmAUYJ2qE= cloud.google.com/go/dlp v1.6.0/go.mod h1:9eyB2xIhpU0sVwUixfBubDoRwP+GjeUoxxeueZmqvmM= cloud.google.com/go/dlp v1.7.0/go.mod h1:68ak9vCiMBjbasxeVD17hVPxDEck+ExiHavX8kiHG+Q= +cloud.google.com/go/dlp v1.9.0/go.mod h1:qdgmqgTyReTz5/YNSSuueR8pl7hO0o9bQ39ZhtgkWp4= cloud.google.com/go/documentai v1.7.0/go.mod h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU= cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU= cloud.google.com/go/documentai v1.9.0/go.mod h1:FS5485S8R00U10GhgBC0aNGrJxBP8ZVpEeJ7PQDZd6k= cloud.google.com/go/documentai v1.10.0/go.mod h1:vod47hKQIPeCfN2QS/jULIvQTugbmdc0ZvxxfQY1bg4= +cloud.google.com/go/documentai v1.16.0/go.mod h1:o0o0DLTEZ+YnJZ+J4wNfTxmDVyrkzFvttBXXtYRMHkM= +cloud.google.com/go/documentai v1.18.0/go.mod h1:F6CK6iUH8J81FehpskRmhLq/3VlwQvb7TvwOceQ2tbs= cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y= cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg= +cloud.google.com/go/domains v0.8.0/go.mod h1:M9i3MMDzGFXsydri9/vW+EWz9sWb4I6WyHqdlAk0idE= cloud.google.com/go/edgecontainer v0.1.0/go.mod 
h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk= cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w= +cloud.google.com/go/edgecontainer v0.3.0/go.mod h1:FLDpP4nykgwwIfcLt6zInhprzw0lEi2P1fjO6Ie0qbc= +cloud.google.com/go/edgecontainer v1.0.0/go.mod h1:cttArqZpBB2q58W/upSG++ooo6EsblxDIolxa3jSjbY= cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU= cloud.google.com/go/essentialcontacts v1.3.0/go.mod h1:r+OnHa5jfj90qIfZDO/VztSFqbQan7HV75p8sA+mdGI= cloud.google.com/go/essentialcontacts v1.4.0/go.mod h1:8tRldvHYsmnBCHdFpvU+GL75oWiBKl80BiqlFh9tp+8= +cloud.google.com/go/essentialcontacts v1.5.0/go.mod h1:ay29Z4zODTuwliK7SnX8E86aUF2CTzdNtvv42niCX0M= cloud.google.com/go/eventarc v1.7.0/go.mod h1:6ctpF3zTnaQCxUjHUdcfgcA1A2T309+omHZth7gDfmc= cloud.google.com/go/eventarc v1.8.0/go.mod h1:imbzxkyAU4ubfsaKYdQg04WS1NvncblHEup4kvF+4gw= +cloud.google.com/go/eventarc v1.10.0/go.mod h1:u3R35tmZ9HvswGRBnF48IlYgYeBcPUCjkr4BTdem2Kw= +cloud.google.com/go/eventarc v1.11.0/go.mod h1:PyUjsUKPWoRBCHeOxZd/lbOOjahV41icXyUY5kSTvVY= cloud.google.com/go/filestore v1.3.0/go.mod h1:+qbvHGvXU1HaKX2nD0WEPo92TP/8AQuCVEBXNY9z0+w= cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLYoINEK8XQAI= +cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs= +cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg= cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk= cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg= cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY= cloud.google.com/go/functions v1.9.0/go.mod h1:Y+Dz8yGguzO3PpIjhLTbnqV1CWmgQ5UwtlpzoyquQ08= +cloud.google.com/go/functions v1.10.0/go.mod 
h1:0D3hEOe3DbEvCXtYOZHQZmD+SzYsi1YbI7dGvHfldXw= +cloud.google.com/go/functions v1.12.0/go.mod h1:AXWGrF3e2C/5ehvwYo/GH6O5s09tOPksiKhz+hH8WkA= +cloud.google.com/go/functions v1.13.0/go.mod h1:EU4O007sQm6Ef/PwRsI8N2umygGqPBS/IZQKBQBcJ3c= cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM= cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA= cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w= cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM= +cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0= cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60= cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo= +cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg= cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o= cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A= +cloud.google.com/go/gkeconnect v0.7.0/go.mod h1:SNfmVqPkaEi3bF/B3CNZOAYPYdg7sU+obZ+QTky2Myw= cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0= cloud.google.com/go/gkehub v0.10.0/go.mod h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0= +cloud.google.com/go/gkehub v0.11.0/go.mod h1:JOWHlmN+GHyIbuWQPl47/C2RFhnFKH38jH9Ascu3n0E= +cloud.google.com/go/gkehub v0.12.0/go.mod h1:djiIwwzTTBrF5NaXCGv3mf7klpEMcST17VBTVVDcuaw= cloud.google.com/go/gkemulticloud v0.3.0/go.mod h1:7orzy7O0S+5kq95e4Hpn7RysVA7dPs8W/GgfUtsPbrA= cloud.google.com/go/gkemulticloud v0.4.0/go.mod h1:E9gxVBnseLWCk24ch+P9+B2CoDFJZTyIgLKSalC7tuI= +cloud.google.com/go/gkemulticloud v0.5.0/go.mod h1:W0JDkiyi3Tqh0TJr//y19wyb1yf8llHVto2Htf2Ja3Y= cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc= 
cloud.google.com/go/gsuiteaddons v1.3.0/go.mod h1:EUNK/J1lZEZO8yPtykKxLXI6JSVN2rg9bN8SXOa0bgM= cloud.google.com/go/gsuiteaddons v1.4.0/go.mod h1:rZK5I8hht7u7HxFQcFei0+AtfS9uSushomRlg+3ua1o= +cloud.google.com/go/gsuiteaddons v1.5.0/go.mod h1:TFCClYLd64Eaa12sFVmUyG62tk4mdIsI7pAnSXRkcFo= cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/iam v0.5.0/go.mod h1:wPU9Vt0P4UmCux7mqtRu6jcpPAb74cP1fh50J3QpkUc= cloud.google.com/go/iam v0.6.0/go.mod h1:+1AH33ueBne5MzYccyMHtEKqLE4/kJOibtffMHDMFMc= cloud.google.com/go/iam v0.7.0/go.mod h1:H5Br8wRaDGNc8XP3keLc4unfUUZeyH3Sfl9XpQEYOeg= cloud.google.com/go/iam v0.8.0/go.mod h1:lga0/y3iH6CX7sYqypWJ33hf7kkfXJag67naqGESjkE= +cloud.google.com/go/iam v0.11.0/go.mod h1:9PiLDanza5D+oWFZiH1uG+RnRCfEGKoyl6yo4cgWZGY= +cloud.google.com/go/iam v0.12.0/go.mod h1:knyHGviacl11zrtZUoDuYpDgLjvr28sLQaG0YB2GYAY= +cloud.google.com/go/iam v0.13.0/go.mod h1:ljOg+rcNfzZ5d6f1nAUJ8ZIxOaZUVoS14bKCtaLZ/D0= cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc= cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A= +cloud.google.com/go/iap v1.6.0/go.mod h1:NSuvI9C/j7UdjGjIde7t7HBz+QTwBcapPE07+sSRcLk= +cloud.google.com/go/iap v1.7.0/go.mod h1:beqQx56T9O1G1yNPph+spKpNibDlYIiIixiqsQXxLIo= +cloud.google.com/go/iap v1.7.1/go.mod h1:WapEwPc7ZxGt2jFGB/C/bm+hP0Y6NXzOYGjpPnmMS74= cloud.google.com/go/ids v1.1.0/go.mod h1:WIuwCaYVOzHIj2OhN9HAwvW+DBdmUAdcWlFxRl+KubM= cloud.google.com/go/ids v1.2.0/go.mod h1:5WXvp4n25S0rA/mQWAg1YEEBBq6/s+7ml1RDCW1IrcY= +cloud.google.com/go/ids v1.3.0/go.mod h1:JBdTYwANikFKaDP6LtW5JAi4gubs57SVNQjemdt6xV4= cloud.google.com/go/iot v1.3.0/go.mod h1:r7RGh2B61+B8oz0AGE+J72AhA0G7tdXItODWsaA2oLs= cloud.google.com/go/iot v1.4.0/go.mod h1:dIDxPOn0UvNDUMD8Ger7FIaTuvMkj+aGk94RPP0iV+g= +cloud.google.com/go/iot v1.5.0/go.mod 
h1:mpz5259PDl3XJthEmh9+ap0affn/MqNSP4My77Qql9o= +cloud.google.com/go/iot v1.6.0/go.mod h1:IqdAsmE2cTYYNO1Fvjfzo9po179rAtJeVGUvkLN3rLE= cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/kms v1.5.0/go.mod h1:QJS2YY0eJGBg3mnDfuaCyLauWwBJiHRboYxJ++1xJNg= cloud.google.com/go/kms v1.6.0/go.mod h1:Jjy850yySiasBUDi6KFUwUv2n1+o7QZFyuUJg6OgjA0= +cloud.google.com/go/kms v1.8.0/go.mod h1:4xFEhYFqvW+4VMELtZyxomGSYtSQKzM178ylFW4jMAg= +cloud.google.com/go/kms v1.9.0/go.mod h1:qb1tPTgfF9RQP8e1wq4cLFErVuTJv7UsSC915J8dh3w= +cloud.google.com/go/kms v1.10.0/go.mod h1:ng3KTUtQQU9bPX3+QGLsflZIHlkbn8amFAMY63m8d24= +cloud.google.com/go/kms v1.10.1/go.mod h1:rIWk/TryCkR59GMC3YtHtXeLzd634lBbKenvyySAyYI= cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic= cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI= cloud.google.com/go/language v1.7.0/go.mod h1:DJ6dYN/W+SQOjF8e1hLQXMF21AkH2w9wiPzPCJa2MIE= cloud.google.com/go/language v1.8.0/go.mod h1:qYPVHf7SPoNNiCL2Dr0FfEFNil1qi3pQEyygwpgVKB8= +cloud.google.com/go/language v1.9.0/go.mod h1:Ns15WooPM5Ad/5no/0n81yUetis74g3zrbeJBE+ptUY= cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8= cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08= +cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw= +cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE= cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc= +cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= cloud.google.com/go/managedidentities 
v1.3.0/go.mod h1:UzlW3cBOiPrzucO5qWkNkh0w33KFtBJU281hacNvsdE= cloud.google.com/go/managedidentities v1.4.0/go.mod h1:NWSBYbEMgqmbZsLIyKvxrYbtqOsxY1ZrGM+9RgDqInM= +cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= cloud.google.com/go/maps v0.1.0/go.mod h1:BQM97WGyfw9FWEmQMpZ5T6cpovXXSd1cGmFma94eubI= +cloud.google.com/go/maps v0.6.0/go.mod h1:o6DAMMfb+aINHz/p/jbcY+mYeXBoZoxTfdSQ8VAJaCw= +cloud.google.com/go/maps v0.7.0/go.mod h1:3GnvVl3cqeSvgMcpRlQidXsPYuDGQ8naBis7MVzpXsY= cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4= cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w= +cloud.google.com/go/mediatranslation v0.7.0/go.mod h1:LCnB/gZr90ONOIQLgSXagp8XUW1ODs2UmUMvcgMfI2I= cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE= cloud.google.com/go/memcache v1.5.0/go.mod h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM= cloud.google.com/go/memcache v1.6.0/go.mod h1:XS5xB0eQZdHtTuTF9Hf8eJkKtR3pVRCcvJwtm68T3rA= cloud.google.com/go/memcache v1.7.0/go.mod h1:ywMKfjWhNtkQTxrWxCkCFkoPjLHPW6A7WOTVI8xy3LY= +cloud.google.com/go/memcache v1.9.0/go.mod h1:8oEyzXCu+zo9RzlEaEjHl4KkgjlNDaXbCQeQWlzNFJM= cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY= cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s= cloud.google.com/go/metastore v1.7.0/go.mod h1:s45D0B4IlsINu87/AsWiEVYbLaIMeUSoxlKKDqBGFS8= cloud.google.com/go/metastore v1.8.0/go.mod h1:zHiMc4ZUpBiM7twCIFQmJ9JMEkDSyZS9U12uf7wHqSI= +cloud.google.com/go/metastore v1.10.0/go.mod h1:fPEnH3g4JJAk+gMRnrAnoqyv2lpUCqJPWOodSaf45Eo= cloud.google.com/go/monitoring v1.7.0/go.mod h1:HpYse6kkGo//7p6sT0wsIC6IBDET0RhIsnmlA53dvEk= cloud.google.com/go/monitoring v1.8.0/go.mod h1:E7PtoMJ1kQXWxPjB6mv2fhC5/15jInuulFdYYtlcvT4= +cloud.google.com/go/monitoring v1.12.0/go.mod 
h1:yx8Jj2fZNEkL/GYZyTLS4ZtZEZN8WtDEiEqG4kLK50w= +cloud.google.com/go/monitoring v1.13.0/go.mod h1:k2yMBAB1H9JT/QETjNkgdCGD9bPF712XiLTVr+cBrpw= cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA= cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o= cloud.google.com/go/networkconnectivity v1.6.0/go.mod h1:OJOoEXW+0LAxHh89nXd64uGG+FbQoeH8DtxCHVOMlaM= cloud.google.com/go/networkconnectivity v1.7.0/go.mod h1:RMuSbkdbPwNMQjB5HBWD5MpTBnNm39iAVpC3TmsExt8= +cloud.google.com/go/networkconnectivity v1.10.0/go.mod h1:UP4O4sWXJG13AqrTdQCD9TnLGEbtNRqjuaaA7bNjF5E= +cloud.google.com/go/networkconnectivity v1.11.0/go.mod h1:iWmDD4QF16VCDLXUqvyspJjIEtBR/4zq5hwnY2X3scM= cloud.google.com/go/networkmanagement v1.4.0/go.mod h1:Q9mdLLRn60AsOrPc8rs8iNV6OHXaGcDdsIQe1ohekq8= cloud.google.com/go/networkmanagement v1.5.0/go.mod h1:ZnOeZ/evzUdUsnvRt792H0uYEnHQEMaz+REhhzJRcf4= +cloud.google.com/go/networkmanagement v1.6.0/go.mod h1:5pKPqyXjB/sgtvB5xqOemumoQNB7y95Q7S+4rjSOPYY= cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ= cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU= +cloud.google.com/go/networksecurity v0.7.0/go.mod h1:mAnzoxx/8TBSyXEeESMy9OOYwo1v+gZ5eMRnsT5bC8k= +cloud.google.com/go/networksecurity v0.8.0/go.mod h1:B78DkqsxFG5zRSVuwYFRZ9Xz8IcQ5iECsNrPn74hKHU= cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY= cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34= cloud.google.com/go/notebooks v1.4.0/go.mod h1:4QPMngcwmgb6uw7Po99B2xv5ufVoIQ7nOGDyL4P8AgA= cloud.google.com/go/notebooks v1.5.0/go.mod h1:q8mwhnP9aR8Hpfnrc5iN5IBhrXUy8S2vuYs+kBJ/gu0= +cloud.google.com/go/notebooks v1.7.0/go.mod h1:PVlaDGfJgj1fl1S3dUwhFMXFgfYGhYQt2164xOMONmE= +cloud.google.com/go/notebooks v1.8.0/go.mod 
h1:Lq6dYKOYOWUCTvw5t2q1gp1lAp0zxAxRycayS0iJcqQ= cloud.google.com/go/optimization v1.1.0/go.mod h1:5po+wfvX5AQlPznyVEZjGJTMr4+CAkJf2XSTQOOl9l4= cloud.google.com/go/optimization v1.2.0/go.mod h1:Lr7SOHdRDENsh+WXVmQhQTrzdu9ybg0NecjHidBq6xs= +cloud.google.com/go/optimization v1.3.1/go.mod h1:IvUSefKiwd1a5p0RgHDbWCIbDFgKuEdB+fPPuP0IDLI= cloud.google.com/go/orchestration v1.3.0/go.mod h1:Sj5tq/JpWiB//X/q3Ngwdl5K7B7Y0KZ7bfv0wL6fqVA= cloud.google.com/go/orchestration v1.4.0/go.mod h1:6W5NLFWs2TlniBphAViZEVhrXRSMgUGDfW7vrWKvsBk= +cloud.google.com/go/orchestration v1.6.0/go.mod h1:M62Bevp7pkxStDfFfTuCOaXgaaqRAga1yKyoMtEoWPQ= cloud.google.com/go/orgpolicy v1.4.0/go.mod h1:xrSLIV4RePWmP9P3tBl8S93lTmlAxjm06NSm2UTmKvE= cloud.google.com/go/orgpolicy v1.5.0/go.mod h1:hZEc5q3wzwXJaKrsx5+Ewg0u1LxJ51nNFlext7Tanwc= +cloud.google.com/go/orgpolicy v1.10.0/go.mod h1:w1fo8b7rRqlXlIJbVhOMPrwVljyuW5mqssvBtU18ONc= cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs= cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg= cloud.google.com/go/osconfig v1.9.0/go.mod h1:Yx+IeIZJ3bdWmzbQU4fxNl8xsZ4amB+dygAwFPlvnNo= cloud.google.com/go/osconfig v1.10.0/go.mod h1:uMhCzqC5I8zfD9zDEAfvgVhDS8oIjySWh+l4WK6GnWw= +cloud.google.com/go/osconfig v1.11.0/go.mod h1:aDICxrur2ogRd9zY5ytBLV89KEgT2MKB2L/n6x1ooPw= cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E= cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU= cloud.google.com/go/oslogin v1.6.0/go.mod h1:zOJ1O3+dTU8WPlGEkFSh7qeHPPSoxrcMbbK1Nm2iX70= cloud.google.com/go/oslogin v1.7.0/go.mod h1:e04SN0xO1UNJ1M5GP0vzVBFicIe4O53FOfcixIqTyXo= +cloud.google.com/go/oslogin v1.9.0/go.mod h1:HNavntnH8nzrn8JCTT5fj18FuJLFJc4NaZJtBnQtKFs= cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0= cloud.google.com/go/phishingprotection v0.6.0/go.mod 
h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA= +cloud.google.com/go/phishingprotection v0.7.0/go.mod h1:8qJI4QKHoda/sb/7/YmMQ2omRLSLYSu9bU0EKCNI+Lk= cloud.google.com/go/policytroubleshooter v1.3.0/go.mod h1:qy0+VwANja+kKrjlQuOzmlvscn4RNsAc0e15GGqfMxg= cloud.google.com/go/policytroubleshooter v1.4.0/go.mod h1:DZT4BcRw3QoO8ota9xw/LKtPa8lKeCByYeKTIf/vxdE= +cloud.google.com/go/policytroubleshooter v1.5.0/go.mod h1:Rz1WfV+1oIpPdN2VvvuboLVRsB1Hclg3CKQ53j9l8vw= +cloud.google.com/go/policytroubleshooter v1.6.0/go.mod h1:zYqaPTsmfvpjm5ULxAyD/lINQxJ0DDsnWOP/GZ7xzBc= cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0= cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI= +cloud.google.com/go/privatecatalog v0.7.0/go.mod h1:2s5ssIFO69F5csTXcwBP7NPFTZvps26xGzvQ2PQaBYg= +cloud.google.com/go/privatecatalog v0.8.0/go.mod h1:nQ6pfaegeDAq/Q5lrfCQzQLhubPiZhSaNhIgfJlnIXs= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcdcPRnFIRI= cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0= +cloud.google.com/go/pubsub v1.28.0/go.mod h1:vuXFpwaVoIPQMGXqRyUQigu/AX1S3IWugR9xznmcXX8= +cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg= +cloud.google.com/go/pubsublite v1.6.0/go.mod h1:1eFCS0U11xlOuMFV/0iBqw3zP12kddMeCbj/F3FSj9k= +cloud.google.com/go/pubsublite v1.7.0/go.mod h1:8hVMwRXfDfvGm3fahVbtDbiLePT3gpoiJYJY+vxWxVM= cloud.google.com/go/recaptchaenterprise 
v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4= cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o= cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk= cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo= cloud.google.com/go/recaptchaenterprise/v2 v2.4.0/go.mod h1:Am3LHfOuBstrLrNCBrlI5sbwx9LBg3te2N6hGvHn2mE= cloud.google.com/go/recaptchaenterprise/v2 v2.5.0/go.mod h1:O8LzcHXN3rz0j+LBC91jrwI3R+1ZSZEWrfL7XHgNo9U= +cloud.google.com/go/recaptchaenterprise/v2 v2.6.0/go.mod h1:RPauz9jeLtB3JVzg6nCbe12qNoaa8pXc4d/YukAmcnA= +cloud.google.com/go/recaptchaenterprise/v2 v2.7.0/go.mod h1:19wVj/fs5RtYtynAPJdDTb69oW0vNHYDBTbB4NvMD9c= cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg= cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4= +cloud.google.com/go/recommendationengine v0.7.0/go.mod h1:1reUcE3GIu6MeBz/h5xZJqNLuuVjNg1lmWMPyjatzac= cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg= cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c= cloud.google.com/go/recommender v1.7.0/go.mod h1:XLHs/W+T8olwlGOgfQenXBTbIseGclClff6lhFVe9Bs= cloud.google.com/go/recommender v1.8.0/go.mod h1:PkjXrTT05BFKwxaUxQmtIlrtj0kph108r02ZZQ5FE70= +cloud.google.com/go/recommender v1.9.0/go.mod h1:PnSsnZY7q+VL1uax2JWkt/UegHssxjUVVCrX52CuEmQ= cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y= cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A= cloud.google.com/go/redis v1.9.0/go.mod h1:HMYQuajvb2D0LvMgZmLDZW8V5aOC/WxstZHiy4g8OiA= cloud.google.com/go/redis v1.10.0/go.mod h1:ThJf3mMBQtW18JzGgh41/Wld6vnDDc/F/F35UolRZPM= +cloud.google.com/go/redis v1.11.0/go.mod 
h1:/X6eicana+BWcUda5PpwZC48o37SiFVTFSs0fWAJ7uQ= cloud.google.com/go/resourcemanager v1.3.0/go.mod h1:bAtrTjZQFJkiWTPDb1WBjzvc6/kifjj4QBYuKCCoqKA= cloud.google.com/go/resourcemanager v1.4.0/go.mod h1:MwxuzkumyTX7/a3n37gmsT3py7LIXwrShilPh3P1tR0= +cloud.google.com/go/resourcemanager v1.5.0/go.mod h1:eQoXNAiAvCf5PXxWxXjhKQoTMaUSNrEfg+6qdf/wots= +cloud.google.com/go/resourcemanager v1.6.0/go.mod h1:YcpXGRs8fDzcUl1Xw8uOVmI8JEadvhRIkoXXUNVYcVo= +cloud.google.com/go/resourcemanager v1.7.0/go.mod h1:HlD3m6+bwhzj9XCouqmeiGuni95NTrExfhoSrkC/3EI= cloud.google.com/go/resourcesettings v1.3.0/go.mod h1:lzew8VfESA5DQ8gdlHwMrqZs1S9V87v3oCnKCWoOuQU= cloud.google.com/go/resourcesettings v1.4.0/go.mod h1:ldiH9IJpcrlC3VSuCGvjR5of/ezRrOxFtpJoJo5SmXg= +cloud.google.com/go/resourcesettings v1.5.0/go.mod h1:+xJF7QSG6undsQDfsCJyqWXyBwUoJLhetkRMDRnIoXA= cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4= cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY= cloud.google.com/go/retail v1.10.0/go.mod h1:2gDk9HsL4HMS4oZwz6daui2/jmKvqShXKQuB2RZ+cCc= cloud.google.com/go/retail v1.11.0/go.mod h1:MBLk1NaWPmh6iVFSz9MeKG/Psyd7TAgm6y/9L2B4x9Y= +cloud.google.com/go/retail v1.12.0/go.mod h1:UMkelN/0Z8XvKymXFbD4EhFJlYKRx1FGhQkVPU5kF14= cloud.google.com/go/run v0.2.0/go.mod h1:CNtKsTA1sDcnqqIFR3Pb5Tq0usWxJJvsWOCPldRU3Do= cloud.google.com/go/run v0.3.0/go.mod h1:TuyY1+taHxTjrD0ZFk2iAR+xyOXEA0ztb7U3UNA0zBo= +cloud.google.com/go/run v0.8.0/go.mod h1:VniEnuBwqjigv0A7ONfQUaEItaiCRVujlMqerPPiktM= +cloud.google.com/go/run v0.9.0/go.mod h1:Wwu+/vvg8Y+JUApMwEDfVfhetv30hCG4ZwDR/IXl2Qg= cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s= cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI= cloud.google.com/go/scheduler v1.6.0/go.mod h1:SgeKVM7MIwPn3BqtcBntpLyrIJftQISRrYB5ZtT+KOk= cloud.google.com/go/scheduler v1.7.0/go.mod 
h1:jyCiBqWW956uBjjPMMuX09n3x37mtyPJegEWKxRsn44= +cloud.google.com/go/scheduler v1.8.0/go.mod h1:TCET+Y5Gp1YgHT8py4nlg2Sew8nUHMqcpousDgXJVQc= +cloud.google.com/go/scheduler v1.9.0/go.mod h1:yexg5t+KSmqu+njTIh3b7oYPheFtBWGcbVUYF1GGMIc= cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA= cloud.google.com/go/secretmanager v1.8.0/go.mod h1:hnVgi/bN5MYHd3Gt0SPuTPPp5ENina1/LxM+2W9U9J4= cloud.google.com/go/secretmanager v1.9.0/go.mod h1:b71qH2l1yHmWQHt9LC80akm86mX8AL6X1MA01dW8ht4= +cloud.google.com/go/secretmanager v1.10.0/go.mod h1:MfnrdvKMPNra9aZtQFvBcvRU54hbPD8/HayQdlUgJpU= cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4= cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0= cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU= cloud.google.com/go/security v1.9.0/go.mod h1:6Ta1bO8LXI89nZnmnsZGp9lVoVWXqsVbIq/t9dzI+2Q= cloud.google.com/go/security v1.10.0/go.mod h1:QtOMZByJVlibUT2h9afNDWRZ1G96gVywH8T5GUSb9IA= +cloud.google.com/go/security v1.12.0/go.mod h1:rV6EhrpbNHrrxqlvW0BWAIawFWq3X90SduMJdFwtLB8= +cloud.google.com/go/security v1.13.0/go.mod h1:Q1Nvxl1PAgmeW0y3HTt54JYIvUdtcpYKVfIB8AOMZ+0= cloud.google.com/go/securitycenter v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU= cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc= cloud.google.com/go/securitycenter v1.15.0/go.mod h1:PeKJ0t8MoFmmXLXWm41JidyzI3PJjd8sXWaVqg43WWk= cloud.google.com/go/securitycenter v1.16.0/go.mod h1:Q9GMaLQFUD+5ZTabrbujNWLtSLZIZF7SAR0wWECrjdk= +cloud.google.com/go/securitycenter v1.18.1/go.mod h1:0/25gAzCM/9OL9vVx4ChPeM/+DlfGQJDwBy/UC8AKK0= +cloud.google.com/go/securitycenter v1.19.0/go.mod h1:LVLmSg8ZkkyaNy4u7HCIshAngSQ8EcIRREP3xBnyfag= cloud.google.com/go/servicecontrol v1.4.0/go.mod h1:o0hUSJ1TXJAmi/7fLJAedOovnujSEvjKCAFNXPQ1RaU= cloud.google.com/go/servicecontrol v1.5.0/go.mod 
h1:qM0CnXHhyqKVuiZnGKrIurvVImCs8gmqWsDoqe9sU1s= +cloud.google.com/go/servicecontrol v1.10.0/go.mod h1:pQvyvSRh7YzUF2efw7H87V92mxU8FnFDawMClGCNuAA= +cloud.google.com/go/servicecontrol v1.11.0/go.mod h1:kFmTzYzTUIuZs0ycVqRHNaNhgR+UMUpw9n02l/pY+mc= +cloud.google.com/go/servicecontrol v1.11.1/go.mod h1:aSnNNlwEFBY+PWGQ2DoM0JJ/QUXqV5/ZD9DOLB7SnUk= cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs= cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg= cloud.google.com/go/servicedirectory v1.6.0/go.mod h1:pUlbnWsLH9c13yGkxCmfumWEPjsRs1RlmJ4pqiNjVL4= cloud.google.com/go/servicedirectory v1.7.0/go.mod h1:5p/U5oyvgYGYejufvxhgwjL8UVXjkuw7q5XcG10wx1U= +cloud.google.com/go/servicedirectory v1.8.0/go.mod h1:srXodfhY1GFIPvltunswqXpVxFPpZjf8nkKQT7XcXaY= +cloud.google.com/go/servicedirectory v1.9.0/go.mod h1:29je5JjiygNYlmsGz8k6o+OZ8vd4f//bQLtvzkPPT/s= cloud.google.com/go/servicemanagement v1.4.0/go.mod h1:d8t8MDbezI7Z2R1O/wu8oTggo3BI2GKYbdG4y/SJTco= cloud.google.com/go/servicemanagement v1.5.0/go.mod h1:XGaCRe57kfqu4+lRxaFEAuqmjzF0r+gWHjWqKqBvKFo= +cloud.google.com/go/servicemanagement v1.6.0/go.mod h1:aWns7EeeCOtGEX4OvZUWCCJONRZeFKiptqKf1D0l/Jc= +cloud.google.com/go/servicemanagement v1.8.0/go.mod h1:MSS2TDlIEQD/fzsSGfCdJItQveu9NXnUniTrq/L8LK4= cloud.google.com/go/serviceusage v1.3.0/go.mod h1:Hya1cozXM4SeSKTAgGXgj97GlqUvF5JaoXacR1JTP/E= cloud.google.com/go/serviceusage v1.4.0/go.mod h1:SB4yxXSaYVuUBYUml6qklyONXNLt83U0Rb+CXyhjEeU= +cloud.google.com/go/serviceusage v1.5.0/go.mod h1:w8U1JvqUqwJNPEOTQjrMHkw3IaIFLoLsPLvsE3xueec= +cloud.google.com/go/serviceusage v1.6.0/go.mod h1:R5wwQcbOWsyuOfbP9tGdAnCAc6B9DRwPG1xtWMDeuPA= cloud.google.com/go/shell v1.3.0/go.mod h1:VZ9HmRjZBsjLGXusm7K5Q5lzzByZmJHf1d0IWHEN5X4= cloud.google.com/go/shell v1.4.0/go.mod h1:HDxPzZf3GkDdhExzD/gs8Grqk+dmYcEjGShZgYa9URw= +cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+qE2f9A= 
cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos= +cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk= +cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M= cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM= cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ= cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0= cloud.google.com/go/speech v1.9.0/go.mod h1:xQ0jTcmnRFFM2RfX/U+rk6FQNUF6DQlydUSyoooSpco= +cloud.google.com/go/speech v1.14.1/go.mod h1:gEosVRPJ9waG7zqqnsHpYTOoAS4KouMRLDFMekpJ0J0= +cloud.google.com/go/speech v1.15.0/go.mod h1:y6oH7GhqCaZANH7+Oe0BhgIogsNInLlz542tg3VqeYI= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= @@ -344,49 +525,77 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= +cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= +cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= +cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= +cloud.google.com/go/storagetransfer v1.8.0/go.mod 
h1:JpegsHHU1eXg7lMHkvf+KE5XDJ7EQu0GwNJbbVGanEw= cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw= cloud.google.com/go/talent v1.2.0/go.mod h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g= cloud.google.com/go/talent v1.3.0/go.mod h1:CmcxwJ/PKfRgd1pBjQgU6W3YBwiewmUzQYH5HHmSCmM= cloud.google.com/go/talent v1.4.0/go.mod h1:ezFtAgVuRf8jRsvyE6EwmbTK5LKciD4KVnHuDEFmOOA= +cloud.google.com/go/talent v1.5.0/go.mod h1:G+ODMj9bsasAEJkQSzO2uHQWXHHXUomArjWQQYkqK6c= cloud.google.com/go/texttospeech v1.4.0/go.mod h1:FX8HQHA6sEpJ7rCMSfXuzBcysDAuWusNNNvN9FELDd8= cloud.google.com/go/texttospeech v1.5.0/go.mod h1:oKPLhR4n4ZdQqWKURdwxMy0uiTS1xU161C8W57Wkea4= +cloud.google.com/go/texttospeech v1.6.0/go.mod h1:YmwmFT8pj1aBblQOI3TfKmwibnsfvhIBzPXcW4EBovc= cloud.google.com/go/tpu v1.3.0/go.mod h1:aJIManG0o20tfDQlRIej44FcwGGl/cD0oiRyMKG19IQ= cloud.google.com/go/tpu v1.4.0/go.mod h1:mjZaX8p0VBgllCzF6wcU2ovUXN9TONFLd7iz227X2Xg= +cloud.google.com/go/tpu v1.5.0/go.mod h1:8zVo1rYDFuW2l4yZVY0R0fb/v44xLh3llq7RuV61fPM= cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg6N0G28= cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y= +cloud.google.com/go/trace v1.8.0/go.mod h1:zH7vcsbAhklH8hWFig58HvxcxyQbaIqMarMg9hn5ECA= +cloud.google.com/go/trace v1.9.0/go.mod h1:lOQqpE5IaWY0Ixg7/r2SjixMuc6lfTFeO4QGM4dQWOk= cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs= cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg= +cloud.google.com/go/translate v1.5.0/go.mod h1:29YDSYveqqpA1CQFD7NQuP49xymq17RXNaUDdc0mNu0= +cloud.google.com/go/translate v1.6.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= +cloud.google.com/go/translate v1.7.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= cloud.google.com/go/video v1.8.0/go.mod h1:sTzKFc0bUSByE8Yoh8X0mn8bMymItVGPfTuUBUyRgxk= cloud.google.com/go/video v1.9.0/go.mod 
h1:0RhNKFRF5v92f8dQt0yhaHrEuH95m068JYOvLZYnJSw= +cloud.google.com/go/video v1.12.0/go.mod h1:MLQew95eTuaNDEGriQdcYn0dTwf9oWiA4uYebxM5kdg= +cloud.google.com/go/video v1.13.0/go.mod h1:ulzkYlYgCp15N2AokzKjy7MQ9ejuynOJdf1tR5lGthk= +cloud.google.com/go/video v1.14.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= +cloud.google.com/go/video v1.15.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= cloud.google.com/go/videointelligence v1.6.0/go.mod h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU= cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4= cloud.google.com/go/videointelligence v1.8.0/go.mod h1:dIcCn4gVDdS7yte/w+koiXn5dWVplOZkE+xwG9FgK+M= cloud.google.com/go/videointelligence v1.9.0/go.mod h1:29lVRMPDYHikk3v8EdPSaL8Ku+eMzDljjuvRs105XoU= +cloud.google.com/go/videointelligence v1.10.0/go.mod h1:LHZngX1liVtUhZvi2uNS0VQuOzNi2TkY1OakiuoUOjU= cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0= cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo= cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo= cloud.google.com/go/vision/v2 v2.4.0/go.mod h1:VtI579ll9RpVTrdKdkMzckdnwMyX2JILb+MhPqRbPsY= cloud.google.com/go/vision/v2 v2.5.0/go.mod h1:MmaezXOOE+IWa+cS7OhRRLK2cNv1ZL98zhqFFZaaH2E= +cloud.google.com/go/vision/v2 v2.6.0/go.mod h1:158Hes0MvOS9Z/bDMSFpjwsUrZ5fPrdwuyyvKSGAGMY= +cloud.google.com/go/vision/v2 v2.7.0/go.mod h1:H89VysHy21avemp6xcf9b9JvZHVehWbET0uT/bcuY/0= cloud.google.com/go/vmmigration v1.2.0/go.mod h1:IRf0o7myyWFSmVR1ItrBSFLFD/rJkfDCUTO4vLlJvsE= cloud.google.com/go/vmmigration v1.3.0/go.mod h1:oGJ6ZgGPQOFdjHuocGcLqX4lc98YQ7Ygq8YQwHh9A7g= +cloud.google.com/go/vmmigration v1.5.0/go.mod h1:E4YQ8q7/4W9gobHjQg4JJSgXXSgY21nA5r8swQV+Xxc= +cloud.google.com/go/vmmigration v1.6.0/go.mod h1:bopQ/g4z+8qXzichC7GW1w2MjbErL54rk3/C843CjfY= cloud.google.com/go/vmwareengine v0.1.0/go.mod 
h1:RsdNEf/8UDvKllXhMz5J40XxDrNJNN4sagiox+OI208= +cloud.google.com/go/vmwareengine v0.2.2/go.mod h1:sKdctNJxb3KLZkE/6Oui94iw/xs9PRNC2wnNLXsHvH8= +cloud.google.com/go/vmwareengine v0.3.0/go.mod h1:wvoyMvNWdIzxMYSpH/R7y2h5h3WFkx6d+1TIsP39WGY= cloud.google.com/go/vpcaccess v1.4.0/go.mod h1:aQHVbTWDYUR1EbTApSVvMq1EnT57ppDmQzZ3imqIk4w= cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqViEpeEpOq0m8= +cloud.google.com/go/vpcaccess v1.6.0/go.mod h1:wX2ILaNhe7TlVa4vC5xce1bCnqE3AeH27RV31lnmZes= cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE= cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= cloud.google.com/go/webrisk v1.6.0/go.mod h1:65sW9V9rOosnc9ZY7A7jsy1zoHS5W9IAXv6dGqhMQMc= cloud.google.com/go/webrisk v1.7.0/go.mod h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A= +cloud.google.com/go/webrisk v1.8.0/go.mod h1:oJPDuamzHXgUc+b8SiHRcVInZQuybnvEW72PqTc7sSg= cloud.google.com/go/websecurityscanner v1.3.0/go.mod h1:uImdKm2wyeXQevQJXeh8Uun/Ym1VqworNDlBXQevGMo= cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ= +cloud.google.com/go/websecurityscanner v1.5.0/go.mod h1:Y6xdCPy81yi0SQnDY1xdNTNpfY1oAgXUlcfN3B3eSng= cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M= cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA= +cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= gioui.org 
v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= +git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= @@ -397,7 +606,10 @@ github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWX github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= +github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -405,12 +617,16 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod 
h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= +github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/arrow/go/v8 v8.0.0 h1:mG1dDlq8aQO4a/PB00T9H19Ga2imvqoFPHI5cykpibs= github.com/apache/arrow/go/v8 v8.0.0/go.mod h1:63co72EKYQT9WKr8Y1Yconk4dysC0t79wNDauYO1ZGg= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.15.0 h1:aGvdaR0v1t9XLgjtBYwxcBvBOTMqClzwE26CHOgjW1Y= github.com/apache/thrift v0.15.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= +github.com/apache/thrift v0.16.0 h1:qEy6UW60iVOlUy+b9ZR0d5WzUWYGOo4HfopoyBaNmoY= +github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= @@ -423,6 +639,7 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod 
h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -451,6 +668,7 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -459,6 +677,7 @@ github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7 github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -467,6 +686,7 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/r github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod 
h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= @@ -482,9 +702,11 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.m github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= +github.com/envoyproxy/go-control-plane v0.11.1-0.20230524094728-9239064ad72f/go.mod h1:sfYdkwUW4BA3PbKjySwjJy+O4Pu0h62rlqCMHNk+K+Q= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= +github.com/envoyproxy/protoc-gen-validate v0.10.1/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= @@ 
-498,6 +720,7 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= +github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -506,10 +729,13 @@ github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2 github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= +github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= +github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= 
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -517,6 +743,8 @@ github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/me github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/goccy/go-json v0.9.6 h1:5/4CtRQdtsX0sal8fdVhTaiMN01Ri8BExZZ8iRmHQ6E= github.com/goccy/go-json v0.9.6/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk= +github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= @@ -524,6 +752,7 @@ github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zV github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -553,8 +782,9 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf 
v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= @@ -564,6 +794,8 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ github.com/google/flatbuffers v2.0.5+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/flatbuffers v2.0.8+incompatible h1:ivUb1cGomAB101ZM1T0nOiWz9pSrTMoa9+EiY7igmkM= +github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -585,6 +817,7 @@ github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXi github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.2.1/go.mod 
h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -608,6 +841,8 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= +github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= +github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -618,6 +853,7 @@ github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= +github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= github.com/googleapis/go-type-adapters v1.0.0/go.mod 
h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -669,6 +905,7 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/asmfmt v1.3.1/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= @@ -677,6 +914,8 @@ github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.1 h1:y9FcTHGyrebwfP0ZZqFiaxTaiDnUrGkJkI+f583BL1A= github.com/klauspost/compress v1.15.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.12 h1:p9dKCg8i4gmOxtv35DvrYoWqYzQrvEVdjQ762Y0OqZE= github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= @@ -686,20 +925,29 @@ github.com/kr/logfmt 
v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= +github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-runewidth v0.0.2/go.mod 
h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0= github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.14 h1:qZgc/Rwetq+MtyE18WhzjokPD93dNqLGNT3QJuLvBGw= +github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= @@ -756,12 +1004,16 @@ github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtP github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= +github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.1.12/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.14 h1:+fL8AQEZtz/ijeNnpduH0bROTu0O3NZAlPjQxGn8LwE= github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= +github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod 
h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -782,6 +1034,7 @@ github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1: github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= @@ -790,13 +1043,17 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod 
h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.21.0/go.mod h1:ZPhntP/xmq1nnND05hhpAh2QMhSsA4UN3MGZ6O2J3hM= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= +github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -834,8 +1091,9 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod 
h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= @@ -871,6 +1129,7 @@ go.opentelemetry.io/otel/sdk v0.20.0/go.mod h1:g/IcepuwNsoiX5Byy2nNV0ySUF1em498m go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -914,6 +1173,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk golang.org/x/exp v0.0.0-20211216164055-b2b84827b756/go.mod h1:b9TAUYHmRtqA6klRHApnXMnj+OyLce4yF5cZCUbk2ps= golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAVyYLEktdNH2GxZLnM3U= golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= +golang.org/x/exp v0.0.0-20220827204233-334a2380cb91 h1:tnebWN09GYg9OLPss1KXj8txwZc6X6uMr6VFdcGNbHw= +golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -923,6 +1184,10 @@ golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+o golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod 
h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -956,6 +1221,8 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0 h1:KENHtAZL2y3NLMYZeHY9DW8HW8V+kQyJsY/V9JlKvCs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1017,8 +1284,12 @@ golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfS golang.org/x/net v0.0.0-20221012135044-0b7e1fb9d458/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod 
h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= @@ -1048,6 +1319,9 @@ golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec= +golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= +golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= +golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1061,6 +1335,7 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1091,6 +1366,7 @@ golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1129,6 +1405,7 @@ golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1146,17 +1423,25 @@ golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod 
h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1170,8 +1455,10 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= @@ -1181,6 +1468,7 @@ golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.1.0/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1233,6 +1521,7 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -1245,11 +1534,14 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.3.0/go.mod 
h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1264,9 +1556,12 @@ gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJ gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/gonum v0.9.3 h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s= gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= +gonum.org/v1/gonum v0.11.0 h1:f1IJhK4Km5tBJmaiJXtk/PkL4cdVX6J+tGiM187uT5E= +gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= +gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -1319,6 +1614,12 @@ google.golang.org/api v0.99.0/go.mod h1:1YOf74vkVndF7pG6hIHuINsM7eWwpVTAfNMNiL91 google.golang.org/api v0.100.0/go.mod 
h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70= google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo= google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0= +google.golang.org/api v0.106.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= +google.golang.org/api v0.107.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= +google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= +google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= +google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= +google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1434,13 +1735,33 @@ google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz google.golang.org/genproto v0.0.0-20221024153911-1573dae28c9c/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo= +google.golang.org/genproto v0.0.0-20221109142239-94d6d90a7d66/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221114212237-e4508ebdbee1/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto 
v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= +google.golang.org/genproto v0.0.0-20221201204527-e3fa12d562f3/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= google.golang.org/genproto v0.0.0-20221202195650-67e5cbc046fd/go.mod h1:cTsE614GARnxrLsqKREzmNYJACSWWpAWdNMwnD7c2BE= -google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f h1:BWUVssLB0HVOSY78gIdvk1dTVYtT1y8SBWtPYuTJ/6w= +google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230112194545-e10362b5ecf9/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230113154510-dbe35b8444a5/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230123190316-2c411cf9d197/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230124163310-31e0e69b6fc2/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230125152338-dcaf20b6aeaa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230127162408-596548ed4efa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230209215440-0dfe4f8abfcc/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/genproto v0.0.0-20230216225411-c8e22ba71e44/go.mod h1:8B0gmkoRebU8ukX6HP+4wrVQUY1+6PkQ44BSyIlflHA= +google.golang.org/genproto v0.0.0-20230222225845-10f96fb3dbec/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= +google.golang.org/genproto v0.0.0-20230223222841-637eb2293923/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= +google.golang.org/genproto v0.0.0-20230303212802-e74f57abe488/go.mod h1:TvhZT5f700eVlTNwND1xoEZQeWTB2RY/65kplwl/bFA= +google.golang.org/genproto 
v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= +google.golang.org/genproto v0.0.0-20230320184635-7606e756e683/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= +google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= +google.golang.org/genproto v0.0.0-20230330154414-c0448cd141ea/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= +google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= @@ -1483,8 +1804,11 @@ google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCD google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= -google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc= +google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5vorUY= google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= +google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= +google.golang.org/grpc v1.56.3 h1:8I4C0Yq1EjstUzUJzpcRVbuYA2mODtEmpWiQoN/b2nc= +google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod 
h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1500,8 +1824,10 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1536,6 +1862,40 @@ honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= +lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= +lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= +modernc.org/cc/v3 v3.36.0/go.mod 
h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= +modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= +modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= +modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= +modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= +modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= +modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= +modernc.org/ccgo/v3 v3.16.8/go.mod h1:zNjwkizS+fIFDrDjIAgBSCLkWbJuHF+ar3QRn+Z9aws= +modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= +modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= +modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= +modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= +modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= +modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU= +modernc.org/libc v1.16.17/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= +modernc.org/libc v1.16.19/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= +modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0= +modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= +modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= +modernc.org/memory v1.2.0/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= +modernc.org/memory v1.2.1/go.mod 
h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= +modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= +modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= +modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= +modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw= +modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= +modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= From 61fdb0057d352fd35a5534eca2801fc2aefcd197 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 22:28:05 -0500 Subject: [PATCH 010/122] chore: Bump @babel/traverse from 7.16.10 to 7.23.2 in /ui (#3801) Bumps [@babel/traverse](https://github.com/babel/babel/tree/HEAD/packages/babel-traverse) from 7.16.10 to 7.23.2. - [Release notes](https://github.com/babel/babel/releases) - [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md) - [Commits](https://github.com/babel/babel/commits/v7.23.2/packages/babel-traverse) --- updated-dependencies: - dependency-name: "@babel/traverse" dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/yarn.lock | 184 ++++++++++++++++++++++++--------------------------- 1 file changed, 86 insertions(+), 98 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 78fe6863bf..02c9fd130f 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -25,12 +25,13 @@ dependencies: "@babel/highlight" "^7.16.7" -"@babel/code-frame@^7.18.6", "@babel/code-frame@^7.21.4": - version "7.21.4" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39" - integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g== +"@babel/code-frame@^7.22.13": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== dependencies: - "@babel/highlight" "^7.18.6" + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" "@babel/compat-data@^7.13.11", "@babel/compat-data@^7.16.4", "@babel/compat-data@^7.16.8": version "7.16.8" @@ -106,12 +107,12 @@ jsesc "^2.5.1" source-map "^0.5.0" -"@babel/generator@^7.21.4": - version "7.21.4" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.4.tgz#64a94b7448989f421f919d5239ef553b37bb26bc" - integrity sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA== +"@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== dependencies: - "@babel/types" "^7.21.4" + "@babel/types" "^7.23.0" "@jridgewell/gen-mapping" "^0.3.2" "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" @@ -190,10 
+191,10 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== "@babel/helper-explode-assignable-expression@^7.16.7": version "7.16.7" @@ -211,13 +212,13 @@ "@babel/template" "^7.16.7" "@babel/types" "^7.16.7" -"@babel/helper-function-name@^7.21.0": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz#d552829b10ea9f120969304023cd0645fa00b1b4" - integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== dependencies: - "@babel/template" "^7.20.7" - "@babel/types" "^7.21.0" + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" "@babel/helper-get-function-arity@^7.16.7": version "7.16.7" @@ -233,12 +234,12 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity 
sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" "@babel/helper-member-expression-to-functions@^7.16.7": version "7.16.7" @@ -328,28 +329,38 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" "@babel/helper-string-parser@^7.19.4": version "7.19.4" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + 
"@babel/helper-validator-identifier@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": +"@babel/helper-validator-identifier@^7.19.1": version "7.19.1" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-option@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" @@ -392,16 +403,16 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== +"@babel/highlight@^7.22.13": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" + integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" + 
"@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.10", "@babel/parser@^7.16.12", "@babel/parser@^7.16.7": +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.12", "@babel/parser@^7.16.7": version "7.16.12" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.12.tgz#9474794f9a650cf5e2f892444227f98e28cdf8b6" integrity sha512-VfaV15po8RiZssrkPweyvbGVSe4x2y+aciFCgn0n0/SJMR22cwofRV1mtnJQYcSB1wUTaA/X1LnA3es66MCO5A== @@ -411,10 +422,10 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.3.tgz#b07702b982990bf6fdc1da5049a23fece4c5c3d0" integrity sha512-7yJPvPV+ESz2IUTPbOL+YkIGyCqOyNIzdguKQuJGnH7bg1WTIifuM21YqokFt/THWh1AkCRn9IgoykTRCBVpzA== -"@babel/parser@^7.20.7", "@babel/parser@^7.21.4": - version "7.21.4" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.4.tgz#94003fdfc520bbe2875d4ae557b43ddb6d880f17" - integrity sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== "@babel/parser@^7.9.4": version "7.19.0" @@ -1176,60 +1187,28 @@ "@babel/parser" "^7.16.7" "@babel/types" "^7.16.7" -"@babel/template@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" - integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.10", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", 
"@babel/traverse@^7.7.2": - version "7.16.10" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.16.10.tgz#448f940defbe95b5a8029975b051f75993e8239f" - integrity sha512-yzuaYXoRJBGMlBhsMJoUW7G1UmSb/eXr/JHYM/MsOJgavJibLwASijW7oXBdw3NQ6T0bW7Ty5P/VarOs9cHmqw== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.16.8" - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-function-name" "^7.16.7" - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/parser" "^7.16.10" - "@babel/types" "^7.16.8" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/traverse@^7.17.0", "@babel/traverse@^7.17.3": - version "7.17.3" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.3.tgz#0ae0f15b27d9a92ba1f2263358ea7c4e7db47b57" - integrity sha512-5irClVky7TxRWIRtxlh2WPUUOLhcPN06AGgaQSB8AEwuyEBgJVuJ5imdHm5zxk8w0QS5T+tDfnDxAlhWjpb7cw== - dependencies: - "@babel/code-frame" "^7.16.7" - "@babel/generator" "^7.17.3" - "@babel/helper-environment-visitor" "^7.16.7" - "@babel/helper-function-name" "^7.16.7" - "@babel/helper-hoist-variables" "^7.16.7" - "@babel/helper-split-export-declaration" "^7.16.7" - "@babel/parser" "^7.17.3" - "@babel/types" "^7.17.0" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/traverse@^7.4.5": - version "7.21.4" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.4.tgz#a836aca7b116634e97a6ed99976236b3282c9d36" - integrity sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q== - dependencies: - "@babel/code-frame" "^7.21.4" - "@babel/generator" "^7.21.4" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.21.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.21.4" - "@babel/types" "^7.21.4" +"@babel/template@^7.22.15": + version "7.22.15" + resolved 
"https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.10", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", "@babel/traverse@^7.17.0", "@babel/traverse@^7.17.3", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" @@ -1249,7 +1228,7 @@ "@babel/helper-validator-identifier" "^7.16.7" to-fast-properties "^2.0.0" -"@babel/types@^7.18.6", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.21.4": +"@babel/types@^7.18.6", "@babel/types@^7.21.4": version "7.21.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.4.tgz#2d5d6bb7908699b3b416409ffd3b5daa25b030d4" integrity sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA== @@ -1258,6 +1237,15 @@ "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb" + integrity 
sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@base2/pretty-print-object@1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz#371ba8be66d556812dc7fb169ebc3c08378f69d4" @@ -3773,7 +3761,7 @@ chalk@4.1.1: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^2.0.0, chalk@^2.4.1: +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== From a75d4170a9544eb0ac529cfb62a8c524925b5f9e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 22:56:14 -0500 Subject: [PATCH 011/122] chore: Bump zod from 3.19.1 to 3.22.3 in /ui (#3817) Bumps [zod](https://github.com/colinhacks/zod) from 3.19.1 to 3.22.3. - [Release notes](https://github.com/colinhacks/zod/releases) - [Changelog](https://github.com/colinhacks/zod/blob/master/CHANGELOG.md) - [Commits](https://github.com/colinhacks/zod/compare/v3.19.1...v3.22.3) --- updated-dependencies: - dependency-name: zod dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 02c9fd130f..becb6bbd7b 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -11709,9 +11709,9 @@ yocto-queue@^0.1.0: integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== zod@^3.11.6: - version "3.19.1" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.19.1.tgz#112f074a97b50bfc4772d4ad1576814bd8ac4473" - integrity sha512-LYjZsEDhCdYET9ikFu6dVPGp2YH9DegXjdJToSzD9rO6fy4qiRYFoyEYwps88OseJlPyl2NOe2iJuhEhL7IpEA== + version "3.22.3" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.3.tgz#2fbc96118b174290d94e8896371c95629e87a060" + integrity sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug== zwitch@^1.0.0: version "1.0.5" From 86d62215f2338ea9d48c6e723e907c82cbe5500b Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Wed, 24 Jan 2024 11:06:54 -0500 Subject: [PATCH 012/122] fix: Correct the returning class proto type of StreamFeatureView to StreamFeatureViewProto instead of FeatureViewProto. 
(#3843) --- sdk/python/feast/feature_view.py | 3 ++- sdk/python/feast/stream_feature_view.py | 7 ++++++- sdk/python/tests/unit/test_feature_views.py | 22 +++++++++++++++++++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index e26759ba92..67f9662d31 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -17,6 +17,7 @@ from typing import Dict, List, Optional, Tuple, Type from google.protobuf.duration_pb2 import Duration +from google.protobuf.message import Message from typeguard import typechecked from feast import utils @@ -274,7 +275,7 @@ def ensure_valid(self): raise ValueError("Feature view has no entities.") @property - def proto_class(self) -> Type[FeatureViewProto]: + def proto_class(self) -> Type[Message]: return FeatureViewProto def with_join_key_map(self, join_key_map: Dict[str, str]): diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index d3a2164788..6a204d6813 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -3,9 +3,10 @@ import warnings from datetime import datetime, timedelta from types import FunctionType -from typing import Dict, List, Optional, Tuple, Union +from typing import Dict, List, Optional, Tuple, Type, Union import dill +from google.protobuf.message import Message from typeguard import typechecked from feast import flags_helper, utils @@ -298,6 +299,10 @@ def __copy__(self): fv.projection = copy.copy(self.projection) return fv + @property + def proto_class(self) -> Type[Message]: + return StreamFeatureViewProto + def stream_feature_view( *, diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index 379396e5c6..afef332d37 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -10,6 +10,9 @@ from feast.feature_view import 
FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource +from feast.protos.feast.core.StreamFeatureView_pb2 import ( + StreamFeatureView as StreamFeatureViewProto, +) from feast.protos.feast.types.Value_pb2 import ValueType from feast.stream_feature_view import StreamFeatureView, stream_feature_view from feast.types import Float32 @@ -277,3 +280,22 @@ def test_hash(): def test_field_types(): with pytest.raises(TypeError): Field(name="name", dtype=ValueType.INT32) + + +def test_stream_feature_view_proto_type(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + sfv = StreamFeatureView( + name="test stream featureview proto class", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + aggregations=[], + ) + assert sfv.proto_class is StreamFeatureViewProto From 9a3590ea771ca3c3224f5e1a833453144e54284e Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Wed, 24 Jan 2024 16:16:15 -0500 Subject: [PATCH 013/122] fix: Verify the existence of Registry tables in snowflake before calling CREATE sql command. Allow read-only user to call feast apply. 
(#3851) Signed-off-by: Shuchu Han --- sdk/python/feast/infra/registry/snowflake.py | 71 ++++++++++-- .../registry/snowflake_registry_table.py | 104 ++++++++++++++++++ .../infra/utils/snowflake/snowflake_utils.py | 10 +- 3 files changed, 171 insertions(+), 14 deletions(-) create mode 100644 sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index 56c7bc1f65..40ec27e7d9 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -124,15 +124,19 @@ def __init__( f'"{self.registry_config.database}"."{self.registry_config.schema_}"' ) - with GetSnowflakeConnection(self.registry_config) as conn: - sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_creation.sql" - with open(sql_function_file, "r") as file: - sqlFile = file.read() - - sqlCommands = sqlFile.split(";") - for command in sqlCommands: - query = command.replace("REGISTRY_PATH", f"{self.registry_path}") - execute_snowflake_statement(conn, query) + if not self._verify_registry_database(): + # Verify the existing resitry database schema from snowflake. If any table names and column types is wrong, run table recreation SQL. 
+ with GetSnowflakeConnection(self.registry_config) as conn: + sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_creation.sql" + with open(sql_function_file, "r") as file: + sqlFile = file.read() + + sqlCommands = sqlFile.split(";") + for command in sqlCommands: + query = command.replace( + "REGISTRY_PATH", f"{self.registry_path}" + ) + execute_snowflake_statement(conn, query) self.cached_registry_proto = self.proto() proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) @@ -145,6 +149,55 @@ def __init__( ) self.project = project + def _verify_registry_database( + self, + ) -> bool: + """Verify the records in registry database. To check: + 1, the 11 tables are existed. + 2, the column types are correct. + + Example return from snowflake's cursor.describe("SELECT * FROM a_table") command: + [ResultMetadata(name='ENTITY_NAME', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=False), + ResultMetadata(name='PROJECT_ID', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=False), + ResultMetadata(name='LAST_UPDATED_TIMESTAMP', type_code=6, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=False), + ResultMetadata(name='ENTITY_PROTO', type_code=11, display_size=None, internal_size=8388608, precision=None, scale=None, is_nullable=False)] + + Returns: + True if the necessary 11 tables are existed in Snowflake and schema of each table is correct. + False if failure happens. 
+ """ + + from feast.infra.utils.snowflake.registry.snowflake_registry_table import ( + snowflake_registry_table_names_and_column_types as expect_tables, + ) + + res = True + + try: + with GetSnowflakeConnection(self.registry_config) as conn: + for table_name in expect_tables: + result_metadata_list = conn.cursor().describe( + f"SELECT * FROM {table_name}" + ) + for col in result_metadata_list: + if ( + expect_tables[table_name][col.name]["type_code"] + != col.type_code + ): + res = False + break + except Exception as e: + res = False # Set to False for all errors. + logger.debug( + f"Failed to verify Registry tables and columns types with exception: {e}." + ) + finally: + # The implementation in snowflake_utils.py will cache the established connection without re-connection logic. + # conn.close() + pass + + return res + def refresh(self, project: Optional[str] = None): if project: project_metadata = proto_registry_utils.get_project_metadata( diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py new file mode 100644 index 0000000000..d24fbc27ec --- /dev/null +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- + +""" +The table names and column types are following the creation detail listed +in "snowflake_table_creation.sql". 
+ +Snowflake Reference: +1, ResultMetadata: https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api#label-python-connector-resultmetadata-object +2, Type Codes: https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api#label-python-connector-type-codes +---------------------------------------------- +type_code String Representation Data Type +0 FIXED NUMBER/INT +1 REAL REAL +2 TEXT VARCHAR/STRING +3 DATE DATE +4 TIMESTAMP TIMESTAMP +5 VARIANT VARIANT +6 TIMESTAMP_LTZ TIMESTAMP_LTZ +7 TIMESTAMP_TZ TIMESTAMP_TZ +8 TIMESTAMP_NTZ TIMESTAMP_TZ +9 OBJECT OBJECT +10 ARRAY ARRAY +11 BINARY BINARY +12 TIME TIME +13 BOOLEAN BOOLEAN +---------------------------------------------- + +(last update: 2023-11-30) + +""" + +snowflake_registry_table_names_and_column_types = { + "DATA_SOURCES": { + "DATA_SOURCE_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "DATA_SOURCE_PROTO": {"type_code": 11, "type": "BINARY"}, + }, + "ENTITIES": { + "ENTITY_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "ENTITY_PROTO": {"type_code": 11, "type": "BINARY"}, + }, + "FEAST_METADATA": { + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "METADATA_KEY": {"type_code": 2, "type": "VARCHAR"}, + "METADATA_VALUE": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + }, + "FEATURE_SERVICES": { + "FEATURE_SERVICE_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "FEATURE_SERVICE_PROTO": {"type_code": 11, "type": "BINARY"}, + }, + "FEATURE_VIEWS": { + "FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 
2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, + "MATERIALIZED_INTERVALS": {"type_code": 11, "type": "BINARY"}, + "USER_METADATA": {"type_code": 11, "type": "BINARY"}, + }, + "MANAGED_INFRA": { + "INFRA_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "INFRA_PROTO": {"type_code": 11, "type": "BINARY"}, + }, + "ON_DEMAND_FEATURE_VIEWS": { + "ON_DEMAND_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "ON_DEMAND_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, + "USER_METADATA": {"type_code": 11, "type": "BINARY"}, + }, + "REQUEST_FEATURE_VIEWS": { + "REQUEST_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "REQUEST_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, + "USER_METADATA": {"type_code": 11, "type": "BINARY"}, + }, + "SAVED_DATASETS": { + "SAVED_DATASET_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "SAVED_DATASET_PROTO": {"type_code": 11, "type": "BINARY"}, + }, + "STREAM_FEATURE_VIEWS": { + "STREAM_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "STREAM_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, + "USER_METADATA": {"type_code": 11, "type": "BINARY"}, + }, + "VALIDATION_REFERENCES": { + "VALIDATION_REFERENCE_NAME": {"type_code": 2, "type": "VARCHAR"}, + "PROJECT_ID": {"type_code": 2, 
"type": "VARCHAR"}, + "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, + "VALIDATION_REFERENCE_PROTO": {"type_code": 11, "type": "BINARY"}, + }, +} diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index a4cda89a6f..3a56619bdb 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -49,19 +49,19 @@ def __init__(self, config: str, autocommit=True): def __enter__(self): - assert self.config.type in [ + assert self.config.type in { "snowflake.registry", "snowflake.offline", "snowflake.engine", "snowflake.online", - ] + } if self.config.type not in _cache: if self.config.type == "snowflake.registry": config_header = "connections.feast_registry" elif self.config.type == "snowflake.offline": config_header = "connections.feast_offline_store" - if self.config.type == "snowflake.engine": + elif self.config.type == "snowflake.engine": config_header = "connections.feast_batch_engine" elif self.config.type == "snowflake.online": config_header = "connections.feast_online_store" @@ -113,11 +113,11 @@ def __exit__(self, exc_type, exc_val, exc_tb): def assert_snowflake_feature_names(feature_view: FeatureView) -> None: for feature in feature_view.features: - assert feature.name not in [ + assert feature.name not in { "entity_key", "feature_name", "feature_value", - ], f"Feature Name: {feature.name} is a protected name to ensure query stability" + }, f"Feature Name: {feature.name} is a protected name to ensure query stability" return None From c0d358a49d5f576bb9f1017d1ee0db2d6cd5f1a5 Mon Sep 17 00:00:00 2001 From: Edson Tirelli Date: Thu, 25 Jan 2024 15:25:19 -0500 Subject: [PATCH 014/122] =?UTF-8?q?revert:=20Verify=20the=20existence=20of?= =?UTF-8?q?=20Registry=20tables=20in=20snowflake=E2=80=A6=20(#3907)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Revert 
"fix: Verify the existence of Registry tables in snowflake before calling CREATE sql command. Allow read-only user to call feast apply. (#3851)" This reverts commit 9a3590ea771ca3c3224f5e1a833453144e54284e. Signed-off-by: Edson Tirelli --- sdk/python/feast/infra/registry/snowflake.py | 71 ++---------- .../registry/snowflake_registry_table.py | 104 ------------------ .../infra/utils/snowflake/snowflake_utils.py | 10 +- 3 files changed, 14 insertions(+), 171 deletions(-) delete mode 100644 sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index 40ec27e7d9..56c7bc1f65 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -124,19 +124,15 @@ def __init__( f'"{self.registry_config.database}"."{self.registry_config.schema_}"' ) - if not self._verify_registry_database(): - # Verify the existing resitry database schema from snowflake. If any table names and column types is wrong, run table recreation SQL. 
- with GetSnowflakeConnection(self.registry_config) as conn: - sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_creation.sql" - with open(sql_function_file, "r") as file: - sqlFile = file.read() - - sqlCommands = sqlFile.split(";") - for command in sqlCommands: - query = command.replace( - "REGISTRY_PATH", f"{self.registry_path}" - ) - execute_snowflake_statement(conn, query) + with GetSnowflakeConnection(self.registry_config) as conn: + sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_creation.sql" + with open(sql_function_file, "r") as file: + sqlFile = file.read() + + sqlCommands = sqlFile.split(";") + for command in sqlCommands: + query = command.replace("REGISTRY_PATH", f"{self.registry_path}") + execute_snowflake_statement(conn, query) self.cached_registry_proto = self.proto() proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) @@ -149,55 +145,6 @@ def __init__( ) self.project = project - def _verify_registry_database( - self, - ) -> bool: - """Verify the records in registry database. To check: - 1, the 11 tables are existed. - 2, the column types are correct. - - Example return from snowflake's cursor.describe("SELECT * FROM a_table") command: - [ResultMetadata(name='ENTITY_NAME', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=False), - ResultMetadata(name='PROJECT_ID', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=False), - ResultMetadata(name='LAST_UPDATED_TIMESTAMP', type_code=6, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=False), - ResultMetadata(name='ENTITY_PROTO', type_code=11, display_size=None, internal_size=8388608, precision=None, scale=None, is_nullable=False)] - - Returns: - True if the necessary 11 tables are existed in Snowflake and schema of each table is correct. 
- False if failure happens. - """ - - from feast.infra.utils.snowflake.registry.snowflake_registry_table import ( - snowflake_registry_table_names_and_column_types as expect_tables, - ) - - res = True - - try: - with GetSnowflakeConnection(self.registry_config) as conn: - for table_name in expect_tables: - result_metadata_list = conn.cursor().describe( - f"SELECT * FROM {table_name}" - ) - for col in result_metadata_list: - if ( - expect_tables[table_name][col.name]["type_code"] - != col.type_code - ): - res = False - break - except Exception as e: - res = False # Set to False for all errors. - logger.debug( - f"Failed to verify Registry tables and columns types with exception: {e}." - ) - finally: - # The implementation in snowflake_utils.py will cache the established connection without re-connection logic. - # conn.close() - pass - - return res - def refresh(self, project: Optional[str] = None): if project: project_metadata = proto_registry_utils.get_project_metadata( diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py deleted file mode 100644 index d24fbc27ec..0000000000 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_registry_table.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -The table names and column types are following the creation detail listed -in "snowflake_table_creation.sql". 
- -Snowflake Reference: -1, ResultMetadata: https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api#label-python-connector-resultmetadata-object -2, Type Codes: https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api#label-python-connector-type-codes ----------------------------------------------- -type_code String Representation Data Type -0 FIXED NUMBER/INT -1 REAL REAL -2 TEXT VARCHAR/STRING -3 DATE DATE -4 TIMESTAMP TIMESTAMP -5 VARIANT VARIANT -6 TIMESTAMP_LTZ TIMESTAMP_LTZ -7 TIMESTAMP_TZ TIMESTAMP_TZ -8 TIMESTAMP_NTZ TIMESTAMP_TZ -9 OBJECT OBJECT -10 ARRAY ARRAY -11 BINARY BINARY -12 TIME TIME -13 BOOLEAN BOOLEAN ----------------------------------------------- - -(last update: 2023-11-30) - -""" - -snowflake_registry_table_names_and_column_types = { - "DATA_SOURCES": { - "DATA_SOURCE_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "DATA_SOURCE_PROTO": {"type_code": 11, "type": "BINARY"}, - }, - "ENTITIES": { - "ENTITY_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "ENTITY_PROTO": {"type_code": 11, "type": "BINARY"}, - }, - "FEAST_METADATA": { - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "METADATA_KEY": {"type_code": 2, "type": "VARCHAR"}, - "METADATA_VALUE": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - }, - "FEATURE_SERVICES": { - "FEATURE_SERVICE_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "FEATURE_SERVICE_PROTO": {"type_code": 11, "type": "BINARY"}, - }, - "FEATURE_VIEWS": { - "FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 
2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, - "MATERIALIZED_INTERVALS": {"type_code": 11, "type": "BINARY"}, - "USER_METADATA": {"type_code": 11, "type": "BINARY"}, - }, - "MANAGED_INFRA": { - "INFRA_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "INFRA_PROTO": {"type_code": 11, "type": "BINARY"}, - }, - "ON_DEMAND_FEATURE_VIEWS": { - "ON_DEMAND_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "ON_DEMAND_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, - "USER_METADATA": {"type_code": 11, "type": "BINARY"}, - }, - "REQUEST_FEATURE_VIEWS": { - "REQUEST_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "REQUEST_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, - "USER_METADATA": {"type_code": 11, "type": "BINARY"}, - }, - "SAVED_DATASETS": { - "SAVED_DATASET_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "SAVED_DATASET_PROTO": {"type_code": 11, "type": "BINARY"}, - }, - "STREAM_FEATURE_VIEWS": { - "STREAM_FEATURE_VIEW_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, "type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "STREAM_FEATURE_VIEW_PROTO": {"type_code": 11, "type": "BINARY"}, - "USER_METADATA": {"type_code": 11, "type": "BINARY"}, - }, - "VALIDATION_REFERENCES": { - "VALIDATION_REFERENCE_NAME": {"type_code": 2, "type": "VARCHAR"}, - "PROJECT_ID": {"type_code": 2, 
"type": "VARCHAR"}, - "LAST_UPDATED_TIMESTAMP": {"type_code": 6, "type": "TIMESTAMP_LTZ"}, - "VALIDATION_REFERENCE_PROTO": {"type_code": 11, "type": "BINARY"}, - }, -} diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index 3a56619bdb..a4cda89a6f 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -49,19 +49,19 @@ def __init__(self, config: str, autocommit=True): def __enter__(self): - assert self.config.type in { + assert self.config.type in [ "snowflake.registry", "snowflake.offline", "snowflake.engine", "snowflake.online", - } + ] if self.config.type not in _cache: if self.config.type == "snowflake.registry": config_header = "connections.feast_registry" elif self.config.type == "snowflake.offline": config_header = "connections.feast_offline_store" - elif self.config.type == "snowflake.engine": + if self.config.type == "snowflake.engine": config_header = "connections.feast_batch_engine" elif self.config.type == "snowflake.online": config_header = "connections.feast_online_store" @@ -113,11 +113,11 @@ def __exit__(self, exc_type, exc_val, exc_tb): def assert_snowflake_feature_names(feature_view: FeatureView) -> None: for feature in feature_view.features: - assert feature.name not in { + assert feature.name not in [ "entity_key", "feature_name", "feature_value", - }, f"Feature Name: {feature.name} is a protected name to ensure query stability" + ], f"Feature Name: {feature.name} is a protected name to ensure query stability" return None From 8d6bec8fc47986c84f366ce3edfe7d03fa6b2e9f Mon Sep 17 00:00:00 2001 From: John Lemmon <137814163+JohnLemmonMedely@users.noreply.github.com> Date: Thu, 25 Jan 2024 14:27:04 -0600 Subject: [PATCH 015/122] feat: Add support for arrays in snowflake (#3769) Adds support for arrays in snowflake Signed-off-by: john.lemmon --- .../feast/infra/offline_stores/snowflake.py | 
31 ++++ .../infra/offline_stores/snowflake_source.py | 6 +- .../snowflake_python_udfs_creation.sql | 56 ++++++ .../snowflake/snowpark/snowflake_udfs.py | 175 ++++++++++++++++++ sdk/python/feast/type_map.py | 8 + sdk/python/tests/data/data_creator.py | 1 + .../feature_repos/repo_configuration.py | 4 +- .../universal/data_sources/snowflake.py | 4 +- .../materialization/test_snowflake.py | 84 +++++++-- 9 files changed, 350 insertions(+), 19 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 38568ce79b..4f11b1ac42 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -1,4 +1,5 @@ import contextlib +import json import os import uuid import warnings @@ -51,6 +52,17 @@ ) from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage +from feast.types import ( + Array, + Bool, + Bytes, + Float32, + Float64, + Int32, + Int64, + String, + UnixTimestamp, +) from feast.usage import log_exceptions_and_usage try: @@ -320,6 +332,7 @@ def query_generator() -> Iterator[str]: on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( feature_refs, project, registry ), + feature_views=feature_views, metadata=RetrievalMetadata( features=feature_refs, keys=list(entity_schema.keys() - {entity_df_event_timestamp_col}), @@ -398,9 +411,12 @@ def __init__( config: RepoConfig, full_feature_names: bool, on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + feature_views: Optional[List[FeatureView]] = None, metadata: Optional[RetrievalMetadata] = None, ): + if feature_views is None: + feature_views = [] if not isinstance(query, str): self._query_generator = query else: @@ -416,6 +432,7 @@ def query_generator() -> Iterator[str]: self.config = config self._full_feature_names = full_feature_names self._on_demand_feature_views = on_demand_feature_views or [] + 
self._feature_views = feature_views self._metadata = metadata self.export_path: Optional[str] if self.config.offline_store.blob_export_location: @@ -436,6 +453,20 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: self.snowflake_conn, self.to_sql() ).fetch_pandas_all() + for feature_view in self._feature_views: + for feature in feature_view.features: + if feature.dtype in [ + Array(String), + Array(Bytes), + Array(Int32), + Array(Int64), + Array(UnixTimestamp), + Array(Float64), + Array(Float32), + Array(Bool), + ]: + df[feature.name] = [json.loads(x) for x in df[feature.name]] + return df def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 95bd46f1ec..0cbf82dd1c 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -279,12 +279,12 @@ def get_table_column_names_and_types( else: row["snowflake_type"] = "NUMBERwSCALE" - elif row["type_code"] in [5, 9, 10, 12]: + elif row["type_code"] in [5, 9, 12]: error = snowflake_unsupported_map[row["type_code"]] raise NotImplementedError( f"The following Snowflake Data Type is not supported: {error}" ) - elif row["type_code"] in [1, 2, 3, 4, 6, 7, 8, 11, 13]: + elif row["type_code"] in [1, 2, 3, 4, 6, 7, 8, 10, 11, 13]: row["snowflake_type"] = snowflake_type_code_map[row["type_code"]] else: raise NotImplementedError( @@ -305,6 +305,7 @@ def get_table_column_names_and_types( 6: "TIMESTAMP_LTZ", 7: "TIMESTAMP_TZ", 8: "TIMESTAMP_NTZ", + 10: "ARRAY", 11: "BINARY", 13: "BOOLEAN", } @@ -312,7 +313,6 @@ def get_table_column_names_and_types( snowflake_unsupported_map = { 5: "VARIANT -- Try converting to VARCHAR", 9: "OBJECT -- Try converting to VARCHAR", - 10: "ARRAY -- Try converting to VARCHAR", 12: "TIME -- Try converting to VARCHAR", } diff --git 
a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql index a197a3ee4c..a444c0b7c5 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql @@ -14,6 +14,62 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_varchar_to_string_pro HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_varchar_to_string_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_bytes_to_list_bytes_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_varchar_to_list_string_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int32_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int64_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 
'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_float_to_list_double_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_boolean_to_list_bool_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + +CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto' + IMPORTS = ('@STAGE_HOLDER/feast.zip'); + CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int32_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py index 02311ca55d..f5d5f10631 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py +++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py @@ -1,6 +1,7 @@ import sys from binascii import unhexlify +import numpy as np import pandas from _snowflake import vectorized @@ -59,6 +60,180 @@ def feast_snowflake_varchar_to_string_proto(df): return df +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_bytes_to_list_bytes_proto(df ARRAY) + RETURNS 
BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" +# ValueType.STRING_LIST = 12 +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_bytes_to_list_bytes_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + # Sometimes bytes come in as strings so we need to convert back to float + numpy_arrays = np.asarray(df[0].to_list()).astype(bytes) + + df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(numpy_arrays, ValueType.BYTES_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_varchar_to_list_string_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_varchar_to_list_string_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(df[0].to_numpy(), ValueType.STRING_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int32_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_number_to_list_int32_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + df = list( + map( + ValueProto.SerializeToString, + 
python_values_to_proto_values(df[0].to_numpy(), ValueType.INT32_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int64_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_number_to_list_int64_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(df[0].to_numpy(), ValueType.INT64_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_float_to_list_double_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_float_to_list_double_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + numpy_arrays = np.asarray(df[0].to_list()).astype(float) + + df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(numpy_arrays, ValueType.DOUBLE_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_boolean_to_list_bool_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_boolean_to_list_bool_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + 
+ df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(df[0].to_numpy(), ValueType.BOOL_LIST), + ) + ) + return df + + +""" +CREATE OR REPLACE FUNCTION feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY) + RETURNS BINARY + LANGUAGE PYTHON + RUNTIME_VERSION = '3.8' + PACKAGES = ('protobuf', 'pandas') + HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto' + IMPORTS = ('@feast_stage/feast.zip'); +""" + + +@vectorized(input=pandas.DataFrame) +def feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df): + sys._xoptions["snowflake_partner_attribution"].append("feast") + + numpy_arrays = np.asarray(df[0].to_list()).astype(np.datetime64) + + df = list( + map( + ValueProto.SerializeToString, + python_values_to_proto_values(numpy_arrays, ValueType.UNIX_TIMESTAMP_LIST), + ) + ) + return df + + """ CREATE OR REPLACE FUNCTION feast_snowflake_number_to_int32_proto(df NUMBER) RETURNS BINARY diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index 9dbbb5a64c..e51e1e743b 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -680,6 +680,14 @@ def _convert_value_name_to_snowflake_udf(value_name: str, project_name: str) -> "FLOAT": f"feast_{project_name}_snowflake_float_to_double_proto", "BOOL": f"feast_{project_name}_snowflake_boolean_to_bool_proto", "UNIX_TIMESTAMP": f"feast_{project_name}_snowflake_timestamp_to_unix_timestamp_proto", + "BYTES_LIST": f"feast_{project_name}_snowflake_array_bytes_to_list_bytes_proto", + "STRING_LIST": f"feast_{project_name}_snowflake_array_varchar_to_list_string_proto", + "INT32_LIST": f"feast_{project_name}_snowflake_array_number_to_list_int32_proto", + "INT64_LIST": f"feast_{project_name}_snowflake_array_number_to_list_int64_proto", + "DOUBLE_LIST": f"feast_{project_name}_snowflake_array_float_to_list_double_proto", + "FLOAT_LIST": 
f"feast_{project_name}_snowflake_array_float_to_list_double_proto", + "BOOL_LIST": f"feast_{project_name}_snowflake_array_boolean_to_list_bool_proto", + "UNIX_TIMESTAMP_LIST": f"feast_{project_name}_snowflake_array_timestamp_to_list_unix_timestamp_proto", } return name_map[value_name].upper() diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 2155468445..8d5b1979fa 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -59,6 +59,7 @@ def get_feature_values_for_dtype( "int64": [1, 2, 3, 4, 5], "float": [1.0, None, 3.0, 4.0, 5.0], "string": ["1", None, "3", "4", "5"], + "bytes": [b"1", None, b"3", b"4", b"5"], "bool": [True, None, False, True, False], "datetime": [ datetime(1980, 1, 1), diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index fda5b3c11d..027dea2c58 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -83,8 +83,8 @@ "password": os.getenv("SNOWFLAKE_CI_PASSWORD", ""), "role": os.getenv("SNOWFLAKE_CI_ROLE", ""), "warehouse": os.getenv("SNOWFLAKE_CI_WAREHOUSE", ""), - "database": "FEAST", - "schema": "ONLINE", + "database": os.getenv("SNOWFLAKE_CI_DATABASE", "FEAST"), + "schema": os.getenv("SNOWFLAKE_CI_SCHEMA_ONLINE", "ONLINE"), } BIGTABLE_CONFIG = { diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index c7e5961a88..c14780da97 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -36,8 +36,8 @@ def __init__(self, project_name: str, *args, **kwargs): password=os.environ["SNOWFLAKE_CI_PASSWORD"], role=os.environ["SNOWFLAKE_CI_ROLE"], 
warehouse=os.environ["SNOWFLAKE_CI_WAREHOUSE"], - database="FEAST", - schema="OFFLINE", + database=os.environ.get("SNOWFLAKE_CI_DATABASE", "FEAST"), + schema=os.environ.get("SNOWFLAKE_CI_SCHEMA_OFFLINE", "OFFLINE"), storage_integration_name=os.getenv("BLOB_EXPORT_STORAGE_NAME", "FEAST_S3"), blob_export_location=os.getenv( "BLOB_EXPORT_URI", "s3://feast-snowflake-offload/export" diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py index 0cf1471dfe..daa96a87c9 100644 --- a/sdk/python/tests/integration/materialization/test_snowflake.py +++ b/sdk/python/tests/integration/materialization/test_snowflake.py @@ -1,10 +1,13 @@ import os -from datetime import timedelta +from datetime import datetime, timedelta import pytest +from pytz import utc +from feast import Field from feast.entity import Entity from feast.feature_view import FeatureView +from feast.types import Array, Bool, Bytes, Float64, Int32, Int64, String, UnixTimestamp from tests.data.data_creator import create_basic_driver_dataset from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, @@ -24,8 +27,8 @@ "password": os.getenv("SNOWFLAKE_CI_PASSWORD", ""), "role": os.getenv("SNOWFLAKE_CI_ROLE", ""), "warehouse": os.getenv("SNOWFLAKE_CI_WAREHOUSE", ""), - "database": "FEAST", - "schema": "MATERIALIZATION", + "database": os.getenv("SNOWFLAKE_CI_DATABASE", "FEAST"), + "schema": os.getenv("SNOWFLAKE_CI_SCHEMA_MATERIALIZATION", "MATERIALIZATION"), } SNOWFLAKE_ONLINE_CONFIG = { @@ -35,15 +38,16 @@ "password": os.getenv("SNOWFLAKE_CI_PASSWORD", ""), "role": os.getenv("SNOWFLAKE_CI_ROLE", ""), "warehouse": os.getenv("SNOWFLAKE_CI_WAREHOUSE", ""), - "database": "FEAST", - "schema": "ONLINE", + "database": os.getenv("SNOWFLAKE_CI_DATABASE", "FEAST"), + "schema": os.getenv("SNOWFLAKE_CI_SCHEMA_ONLINE", "ONLINE"), } +@pytest.mark.parametrize("online_store", [SNOWFLAKE_ONLINE_CONFIG, "sqlite"]) 
@pytest.mark.integration -def test_snowflake_materialization_consistency_internal(): +def test_snowflake_materialization_consistency(online_store): snowflake_config = IntegrationTestRepoConfig( - online_store=SNOWFLAKE_ONLINE_CONFIG, + online_store=online_store, offline_store_creator=SnowflakeDataSourceCreator, batch_engine=SNOWFLAKE_ENGINE_CONFIG, ) @@ -84,15 +88,32 @@ def test_snowflake_materialization_consistency_internal(): snowflake_environment.data_source_creator.teardown() +@pytest.mark.parametrize( + "feature_dtype, feast_dtype", + [ + ("string", Array(String)), + ("bytes", Array(Bytes)), + ("int32", Array(Int32)), + ("int64", Array(Int64)), + ("float", Array(Float64)), + ("bool", Array(Bool)), + ("datetime", Array(UnixTimestamp)), + ], +) +@pytest.mark.parametrize("feature_is_empty_list", [False]) +@pytest.mark.parametrize("online_store", [SNOWFLAKE_ONLINE_CONFIG, "sqlite"]) @pytest.mark.integration -def test_snowflake_materialization_consistency_external(): +def test_snowflake_materialization_consistency_internal_with_lists( + feature_dtype, feast_dtype, feature_is_empty_list, online_store +): snowflake_config = IntegrationTestRepoConfig( + online_store=online_store, offline_store_creator=SnowflakeDataSourceCreator, batch_engine=SNOWFLAKE_ENGINE_CONFIG, ) snowflake_environment = construct_test_environment(snowflake_config, None) - df = create_basic_driver_dataset() + df = create_basic_driver_dataset(Int32, feature_dtype, True, feature_is_empty_list) ds = snowflake_environment.data_source_creator.create_data_source( df, snowflake_environment.feature_store.project, @@ -105,23 +126,62 @@ def test_snowflake_materialization_consistency_external(): join_keys=["driver_id"], ) + schema = [ + Field(name="driver_id", dtype=Int32), + Field(name="value", dtype=feast_dtype), + ] driver_stats_fv = FeatureView( name="driver_hourly_stats", entities=[driver], ttl=timedelta(weeks=52), + schema=schema, source=ds, ) try: fs.apply([driver, driver_stats_fv]) - # 
materialization is run in two steps and - # we use timestamp from generated dataframe as a split point split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) print(f"Split datetime: {split_dt}") + now = datetime.utcnow() + + full_feature_names = True + start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + end_date = split_dt + fs.materialize( + feature_views=[driver_stats_fv.name], + start_date=start_date, + end_date=end_date, + ) + + expected_values = { + "int32": [3] * 2, + "int64": [3] * 2, + "float": [3.0] * 2, + "string": ["3"] * 2, + "bytes": [b"3"] * 2, + "bool": [False] * 2, + "datetime": [datetime(1981, 1, 1, tzinfo=utc)] * 2, + } + expected_value = [] if feature_is_empty_list else expected_values[feature_dtype] + + response_dict = fs.get_online_features( + [f"{driver_stats_fv.name}:value"], + [{"driver_id": 1}], + full_feature_names=full_feature_names, + ).to_dict() + + actual_value = response_dict[f"{driver_stats_fv.name}__value"][0] + assert actual_value is not None, f"Response: {response_dict}" + if feature_dtype == "float": + for actual_num, expected_num in zip(actual_value, expected_value): + assert ( + abs(actual_num - expected_num) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert actual_value == expected_value - validate_offline_online_store_consistency(fs, driver_stats_fv, split_dt) finally: fs.teardown() snowflake_environment.data_source_creator.teardown() From ea8ad1731a5ebe798b11181fc0027f7cac0e1526 Mon Sep 17 00:00:00 2001 From: Alex Vinnik <33845028+alex-vinnik-sp@users.noreply.github.com> Date: Sat, 27 Jan 2024 04:53:31 -0600 Subject: [PATCH 016/122] feat: Support s3gov schema by snowflake offline store during materialization (#3891) --- .../feast/infra/offline_stores/snowflake.py | 7 ++- .../infra/offline_stores/test_snowflake.py | 57 +++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 sdk/python/tests/unit/infra/offline_stores/test_snowflake.py diff 
--git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 4f11b1ac42..dd13ffc96c 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -615,12 +615,17 @@ def to_remote_storage(self) -> List[str]: HEADER = TRUE """ cursor = execute_snowflake_statement(self.snowflake_conn, query) + # s3gov schema is used by Snowflake in AWS govcloud regions + # remove gov portion from schema and pass it to online store upload + native_export_path = self.export_path.replace("s3gov://", "s3://") + return self._get_file_names_from_copy_into(cursor, native_export_path) + def _get_file_names_from_copy_into(self, cursor, native_export_path) -> List[str]: file_name_column_index = [ idx for idx, rm in enumerate(cursor.description) if rm.name == "FILE_NAME" ][0] return [ - f"{self.export_path}/{row[file_name_column_index]}" + f"{native_export_path}/{row[file_name_column_index]}" for row in cursor.fetchall() ] diff --git a/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py new file mode 100644 index 0000000000..afc3ae97ae --- /dev/null +++ b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py @@ -0,0 +1,57 @@ +import re +from unittest.mock import ANY, MagicMock, patch + +import pytest + +from feast.infra.offline_stores.snowflake import ( + SnowflakeOfflineStoreConfig, + SnowflakeRetrievalJob, +) +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.repo_config import RepoConfig + + +@pytest.fixture(params=["s3", "s3gov"]) +def retrieval_job(request): + offline_store_config = SnowflakeOfflineStoreConfig( + type="snowflake.offline", + account="snow", + user="snow", + password="snow", + role="snow", + warehouse="snow", + database="FEAST", + schema="OFFLINE", + storage_integration_name="FEAST_S3", + 
blob_export_location=f"{request.param}://feast-snowflake-offload/export", + ) + retrieval_job = SnowflakeRetrievalJob( + query="SELECT * FROM snowflake", + snowflake_conn=MagicMock(), + config=RepoConfig( + registry="s3://ml-test/repo/registry.db", + project="test", + provider="snowflake.offline", + online_store=SqliteOnlineStoreConfig(type="sqlite"), + offline_store=offline_store_config, + ), + full_feature_names=True, + on_demand_feature_views=[], + ) + return retrieval_job + + +def test_to_remote_storage(retrieval_job): + stored_files = ["just a path", "maybe another"] + with patch.object( + retrieval_job, "to_snowflake", return_value=None + ) as mock_to_snowflake, patch.object( + retrieval_job, "_get_file_names_from_copy_into", return_value=stored_files + ) as mock_get_file_names_from_copy: + assert ( + retrieval_job.to_remote_storage() == stored_files + ), "should return the list of files" + mock_to_snowflake.assert_called_once() + mock_get_file_names_from_copy.assert_called_once_with(ANY, ANY) + native_path = mock_get_file_names_from_copy.call_args[0][1] + assert re.match("^s3://.*", native_path), "path should be s3://*" From 2f99a617b6a5d8eae1e27c780bbfa94594f54441 Mon Sep 17 00:00:00 2001 From: senbong Date: Wed, 31 Jan 2024 01:50:13 +0800 Subject: [PATCH 017/122] fix: Create index only if not exists during MySQL online store update (#3905) Update mysql.py to create index only if not exists during update Signed-off-by: senbong --- .../contrib/mysql_online_store/mysql.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py index fa7dd2c2a4..c09cb126f0 100644 --- a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py +++ b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py @@ -178,8 +178,11 @@ def update( # We don't create any special state 
for the entities in this implementation. for table in tables_to_keep: + + table_name = _table_id(project, table) + index_name = f"{table_name}_ek" cur.execute( - f"""CREATE TABLE IF NOT EXISTS {_table_id(project, table)} (entity_key VARCHAR(512), + f"""CREATE TABLE IF NOT EXISTS {table_name} (entity_key VARCHAR(512), feature_name VARCHAR(256), value BLOB, event_ts timestamp NULL DEFAULT NULL, @@ -187,9 +190,16 @@ def update( PRIMARY KEY(entity_key, feature_name))""" ) - cur.execute( - f"ALTER TABLE {_table_id(project, table)} ADD INDEX {_table_id(project, table)}_ek (entity_key);" + index_exists = cur.execute( + f""" + SELECT 1 FROM information_schema.statistics + WHERE table_schema = DATABASE() AND table_name = '{table_name}' AND index_name = '{index_name}' + """ ) + if not index_exists: + cur.execute( + f"ALTER TABLE {table_name} ADD INDEX {index_name} (entity_key);" + ) for table in tables_to_delete: _drop_table_and_index(cur, project, table) From 987f0fdc99df1ef4507baff75e3df0e02bf42034 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mart=C3=AD=20Jord=C3=A0=20Roca?= <108732053+marti-jorda-roca@users.noreply.github.com> Date: Tue, 30 Jan 2024 18:50:28 +0100 Subject: [PATCH 018/122] fix: Bytewax materialization engine fails when loading feature_store.yaml (#3912) * bytewax materialization loads yaml config correctly Signed-off-by: marti-jorda-roca * added postgres dependency for SQL registries Signed-off-by: marti-jorda-roca --------- Signed-off-by: marti-jorda-roca --- .../feast/infra/materialization/contrib/bytewax/Dockerfile | 2 +- .../feast/infra/materialization/contrib/bytewax/dataflow.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/Dockerfile b/sdk/python/feast/infra/materialization/contrib/bytewax/Dockerfile index a26661ead3..a7d0af9b41 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/Dockerfile +++ 
b/sdk/python/feast/infra/materialization/contrib/bytewax/Dockerfile @@ -25,5 +25,5 @@ COPY README.md README.md # git dir to infer the version of feast we're installing. # https://github.com/pypa/setuptools_scm#usage-from-docker # I think it also assumes that this dockerfile is being built from the root of the directory. -RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir '.[aws,gcp,bytewax,snowflake]' +RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir '.[aws,gcp,bytewax,snowflake,postgres]' diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/dataflow.py b/sdk/python/feast/infra/materialization/contrib/bytewax/dataflow.py index 9d9b328c0e..bbc32cc165 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/dataflow.py +++ b/sdk/python/feast/infra/materialization/contrib/bytewax/dataflow.py @@ -12,10 +12,10 @@ logging.basicConfig(level=logging.INFO) with open("/var/feast/feature_store.yaml") as f: - feast_config = yaml.safe_load(f) + feast_config = yaml.load(f, Loader=yaml.Loader) with open("/var/feast/bytewax_materialization_config.yaml") as b: - bytewax_config = yaml.safe_load(b) + bytewax_config = yaml.load(b, Loader=yaml.Loader) config = RepoConfig(**feast_config) store = FeatureStore(config=config) From fa8cfd478ebd0aa24d48589b116f517eb9cc6c83 Mon Sep 17 00:00:00 2001 From: Willem Pienaar <6728866+woop@users.noreply.github.com> Date: Tue, 30 Jan 2024 11:24:51 -0800 Subject: [PATCH 019/122] Update maintainers.md (#3918) --- community/maintainers.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/community/maintainers.md b/community/maintainers.md index 8aca48fd0d..9f8d349a5d 100644 --- a/community/maintainers.md +++ b/community/maintainers.md @@ -9,7 +9,12 @@ In alphabetical order | Name | GitHub Username | Email | Organization | | -------------- | ---------------- |-----------------------------| ------------------ | | Achal Shah | `achals` | achals@gmail.com | 
Tecton | -| Willem Pienaar | `woop` | will.pienaar@gmail.com | Tecton | +| Edson Tirelli | `etirelli` | ed.tirelli@gmail.com | Red Hat | +| Francisco Javier Arceo | `franciscojavierarceo` | arceofrancisco@gmail.com | Affirm | +| Hao Xu | `hao-affirm` | sduxuhao@gmail.com | JPMorgan | +| Jeremy Ary | `jeremyary` | jeremy.ary@gmail.com | Red Hat | +| Shuchu Han | `shuchu` | shuchu.han@gmail.com | Independent | +| Willem Pienaar | `woop` | will.pienaar@gmail.com | Cleric | | Zhiling Chen | `zhilingc` | chnzhlng@gmail.com | GetGround | ## Emeritus Maintainers From 936ecfdaf8419191c8c32a44890f20d4b1eba9f8 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Tue, 30 Jan 2024 16:22:51 -0500 Subject: [PATCH 020/122] docs: Updating maintainers.md (#3919) --- community/maintainers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community/maintainers.md b/community/maintainers.md index 9f8d349a5d..0b3d4ab648 100644 --- a/community/maintainers.md +++ b/community/maintainers.md @@ -11,7 +11,7 @@ In alphabetical order | Achal Shah | `achals` | achals@gmail.com | Tecton | | Edson Tirelli | `etirelli` | ed.tirelli@gmail.com | Red Hat | | Francisco Javier Arceo | `franciscojavierarceo` | arceofrancisco@gmail.com | Affirm | -| Hao Xu | `hao-affirm` | sduxuhao@gmail.com | JPMorgan | +| Hao Xu | `HaoXuAI` | sduxuhao@gmail.com | JPMorgan | | Jeremy Ary | `jeremyary` | jeremy.ary@gmail.com | Red Hat | | Shuchu Han | `shuchu` | shuchu.han@gmail.com | Independent | | Willem Pienaar | `woop` | will.pienaar@gmail.com | Cleric | From 8bce6dc143837b1dc88f59336994148894d5ccbe Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Wed, 31 Jan 2024 17:23:14 -0500 Subject: [PATCH 021/122] docs: Updated development guide to include compiling the protos (#3896) * [docs] updated development guide to include compiling the protos Signed-off-by: franciscojavierarceo * adding note on spinning up the docker image Signed-off-by: franciscojavierarceo --------- 
Signed-off-by: franciscojavierarceo --- docs/project/development-guide.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index 931d0243d2..43dae1d678 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -154,6 +154,16 @@ pip install -e ".[dev]" This will allow the installed feast version to automatically reflect changes to your local development version of Feast without needing to reinstall everytime you make code changes. +10. Compile the protubufs +```sh +make compile-protos-python +``` + +11. Spin up Docker Image +```sh +docker build -t docker-whale -f ./sdk/python/feast/infra/feature_servers/multicloud/Dockerfile . +``` + ### Code Style & Linting Feast Python SDK / CLI codebase: - Conforms to [Black code style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html) From 7153cad6082edfded96999c49ee1bdc9329e11c3 Mon Sep 17 00:00:00 2001 From: Harry Date: Fri, 2 Feb 2024 20:17:46 +0700 Subject: [PATCH 022/122] fix: Prevent spamming pull busybox from dockerhub (#3923) Signed-off-by: Hai Nguyen --- .../contrib/bytewax/bytewax_materialization_engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py index 5c7a719532..060a47ce58 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py @@ -421,7 +421,7 @@ def _create_job_definition(self, job_id, namespace, pods, env, index_offset=0): } ], "image": "busybox", - "imagePullPolicy": "Always", + "imagePullPolicy": "IfNotPresent", "name": "init-hostfile", "resources": {}, "securityContext": { From bf026a018c16081669170ee79e7b191fcd328d8b Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Feb 2024 20:00:49 -0500 Subject: [PATCH 023/122] chore: Bump jupyterlab from 4.0.8 to 4.0.11 in /sdk/python/requirements (#3895) --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 5e407c1a99..094418cda6 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -409,7 +409,7 @@ jupyter-server==2.11.2 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.8 +jupyterlab==4.0.11 # via notebook jupyterlab-pygments==0.2.2 # via nbconvert diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 02eaf6dc30..c477c20835 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -425,7 +425,7 @@ jupyter-server==2.11.2 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.8 +jupyterlab==4.0.11 # via notebook jupyterlab-pygments==0.2.2 # via nbconvert diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 43c49a4952..6f400fe240 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -416,7 +416,7 @@ jupyter-server==2.11.2 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.8 +jupyterlab==4.0.11 # via notebook jupyterlab-pygments==0.2.2 # via nbconvert From f494f02e1254b91b56b0b69f4a15edafe8d7291a Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sun, 4 Feb 2024 
21:26:12 +0400 Subject: [PATCH 024/122] fix: Remove unnecessary dependency on mysqlclient (#3925) --- sdk/python/feast/infra/feature_servers/multicloud/Dockerfile | 1 - .../feast/infra/feature_servers/multicloud/Dockerfile.dev | 1 - sdk/python/requirements/py3.10-ci-requirements.txt | 2 -- sdk/python/requirements/py3.8-ci-requirements.txt | 2 -- sdk/python/requirements/py3.9-ci-requirements.txt | 2 -- sdk/python/tests/unit/test_sql_registry.py | 2 +- setup.py | 2 +- 7 files changed, 2 insertions(+), 10 deletions(-) diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index c95c515fb4..fdd8e3ac51 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -4,7 +4,6 @@ RUN apt update && \ apt install -y \ jq \ python3-dev \ - default-libmysqlclient-dev \ build-essential RUN pip install pip --upgrade diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev index ecbc199a5b..3fc1355d7a 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev @@ -4,7 +4,6 @@ RUN apt update && \ apt install -y \ jq \ python3-dev \ - default-libmysqlclient-dev \ build-essential RUN pip install pip --upgrade diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 094418cda6..740356907d 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -472,8 +472,6 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.1.0 # via feast (setup.py) -mysqlclient==2.2.0 - # via feast (setup.py) nbclient==0.9.0 # via nbconvert nbconvert==7.11.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt 
b/sdk/python/requirements/py3.8-ci-requirements.txt index c477c20835..3bda9e72f9 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -488,8 +488,6 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.1.0 # via feast (setup.py) -mysqlclient==2.2.0 - # via feast (setup.py) nbclient==0.9.0 # via nbconvert nbconvert==7.11.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6f400fe240..6989d5b4cc 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -479,8 +479,6 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.1.0 # via feast (setup.py) -mysqlclient==2.2.0 - # via feast (setup.py) nbclient==0.9.0 # via nbconvert nbconvert==7.11.0 diff --git a/sdk/python/tests/unit/test_sql_registry.py b/sdk/python/tests/unit/test_sql_registry.py index 39896d3a9d..b96dc6fe77 100644 --- a/sdk/python/tests/unit/test_sql_registry.py +++ b/sdk/python/tests/unit/test_sql_registry.py @@ -103,7 +103,7 @@ def mysql_registry(): registry_config = RegistryConfig( registry_type="sql", - path=f"mysql+mysqldb://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}", + path=f"mysql+pymysql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}", ) yield SqlRegistry(registry_config, "project", None) diff --git a/setup.py b/setup.py index 33bf76e181..4905a7697d 100644 --- a/setup.py +++ b/setup.py @@ -116,7 +116,7 @@ "psycopg2-binary>=2.8.3,<3", ] -MYSQL_REQUIRED = ["mysqlclient", "pymysql", "types-PyMySQL"] +MYSQL_REQUIRED = ["pymysql", "types-PyMySQL"] HBASE_REQUIRED = [ "happybase>=1.2.0,<3", From 1f3cab825c927d8a9337de5cd340d0bb4ea70558 Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 6 Feb 2024 12:28:25 +0700 Subject: [PATCH 025/122] ci: Extend python base version for test cases (#3929) Signed-off-by: Hai Nguyen --- 
.github/workflows/pr_integration_tests.yml | 2 +- .github/workflows/pr_local_integration_tests.yml | 2 +- .github/workflows/unit_tests.yml | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 73344ec2dd..26c85b0126 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -86,7 +86,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.8", "3.10" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 111a9b51a9..aeb4100dc8 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -19,7 +19,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.8", "3.10" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index f03cd33346..31e6d08c74 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -12,8 +12,6 @@ jobs: exclude: - os: macOS-latest python-version: "3.9" - - os: macOS-latest - python-version: "3.10" env: OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} From 373e624abb8779b8a60d30aa08d25414d987bb1b Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Wed, 7 Feb 2024 03:26:32 +0400 Subject: [PATCH 026/122] feat: Add gRPC Registry Server (#3924) --- protos/feast/registry/RegistryServer.proto | 230 ++++++++++++++++++ sdk/python/feast/cli.py | 21 +- sdk/python/feast/constants.py | 3 + sdk/python/feast/feature_store.py | 7 + .../feast/infra/registry/base_registry.py | 4 +- sdk/python/feast/infra/registry/registry.py | 8 +- sdk/python/feast/registry_server.py | 202 +++++++++++++++ sdk/python/tests/unit/test_registry_server.py | 60 
+++++ setup.py | 2 +- 9 files changed, 532 insertions(+), 5 deletions(-) create mode 100644 protos/feast/registry/RegistryServer.proto create mode 100644 sdk/python/feast/registry_server.py create mode 100644 sdk/python/tests/unit/test_registry_server.py diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto new file mode 100644 index 0000000000..3e7773e89a --- /dev/null +++ b/protos/feast/registry/RegistryServer.proto @@ -0,0 +1,230 @@ +syntax = "proto3"; + +package feast.registry; + +import "google/protobuf/timestamp.proto"; +import "google/protobuf/empty.proto"; +import "feast/core/Registry.proto"; +import "feast/core/Entity.proto"; +import "feast/core/DataSource.proto"; +import "feast/core/FeatureView.proto"; +import "feast/core/RequestFeatureView.proto"; +import "feast/core/StreamFeatureView.proto"; +import "feast/core/OnDemandFeatureView.proto"; +import "feast/core/FeatureService.proto"; +import "feast/core/SavedDataset.proto"; +import "feast/core/ValidationProfile.proto"; +import "feast/core/InfraObject.proto"; + +service RegistryServer{ + // Entity RPCs + rpc GetEntity (GetEntityRequest) returns (feast.core.Entity) {} + rpc ListEntities (ListEntitiesRequest) returns (ListEntitiesResponse) {} + + // DataSource RPCs + rpc GetDataSource (GetDataSourceRequest) returns (feast.core.DataSource) {} + rpc ListDataSources (ListDataSourcesRequest) returns (ListDataSourcesResponse) {} + + // FeatureView RPCs + rpc GetFeatureView (GetFeatureViewRequest) returns (feast.core.FeatureView) {} + rpc ListFeatureViews (ListFeatureViewsRequest) returns (ListFeatureViewsResponse) {} + + // RequestFeatureView RPCs + rpc GetRequestFeatureView (GetRequestFeatureViewRequest) returns (feast.core.RequestFeatureView) {} + rpc ListRequestFeatureViews (ListRequestFeatureViewsRequest) returns (ListRequestFeatureViewsResponse) {} + + // StreamFeatureView RPCs + rpc GetStreamFeatureView (GetStreamFeatureViewRequest) returns 
(feast.core.StreamFeatureView) {} + rpc ListStreamFeatureViews (ListStreamFeatureViewsRequest) returns (ListStreamFeatureViewsResponse) {} + + // OnDemandFeatureView RPCs + rpc GetOnDemandFeatureView (GetOnDemandFeatureViewRequest) returns (feast.core.OnDemandFeatureView) {} + rpc ListOnDemandFeatureViews (ListOnDemandFeatureViewsRequest) returns (ListOnDemandFeatureViewsResponse) {} + + // FeatureService RPCs + rpc GetFeatureService (GetFeatureServiceRequest) returns (feast.core.FeatureService) {} + rpc ListFeatureServices (ListFeatureServicesRequest) returns (ListFeatureServicesResponse) {} + + // SavedDataset RPCs + rpc GetSavedDataset (GetSavedDatasetRequest) returns (feast.core.SavedDataset) {} + rpc ListSavedDatasets (ListSavedDatasetsRequest) returns (ListSavedDatasetsResponse) {} + + // ValidationReference RPCs + rpc GetValidationReference (GetValidationReferenceRequest) returns (feast.core.ValidationReference) {} + rpc ListValidationReferences (ListValidationReferencesRequest) returns (ListValidationReferencesResponse) {} + + rpc ListProjectMetadata (ListProjectMetadataRequest) returns (ListProjectMetadataResponse) {} + rpc GetInfra (GetInfraRequest) returns (feast.core.Infra) {} + rpc Refresh (RefreshRequest) returns (google.protobuf.Empty) {} + rpc Proto (google.protobuf.Empty) returns (feast.core.Registry) {} + +} + +message RefreshRequest { + string project = 1; +} + +message GetInfraRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListProjectMetadataRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListProjectMetadataResponse { + repeated feast.core.ProjectMetadata project_metadata = 1; +} + +message GetEntityRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListEntitiesRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListEntitiesResponse { + repeated feast.core.Entity entities = 1; +} + +// DataSources + +message GetDataSourceRequest { + string name = 
1; + string project = 2; + bool allow_cache = 3; +} + +message ListDataSourcesRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListDataSourcesResponse { + repeated feast.core.DataSource data_sources = 1; +} + +// FeatureViews + +message GetFeatureViewRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListFeatureViewsRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListFeatureViewsResponse { + repeated feast.core.FeatureView feature_views = 1; +} + +// RequestFeatureView + +message GetRequestFeatureViewRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListRequestFeatureViewsRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListRequestFeatureViewsResponse { + repeated feast.core.RequestFeatureView request_feature_views = 1; +} + +// StreamFeatureView + +message GetStreamFeatureViewRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListStreamFeatureViewsRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListStreamFeatureViewsResponse { + repeated feast.core.StreamFeatureView stream_feature_views = 1; +} + +// OnDemandFeatureView + +message GetOnDemandFeatureViewRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListOnDemandFeatureViewsRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListOnDemandFeatureViewsResponse { + repeated feast.core.OnDemandFeatureView on_demand_feature_views = 1; +} + +// FeatureServices + +message GetFeatureServiceRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListFeatureServicesRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListFeatureServicesResponse { + repeated feast.core.FeatureService feature_services = 1; +} + +// SavedDataset + +message GetSavedDatasetRequest { + string name = 1; + string project = 2; + bool allow_cache = 
3; +} + +message ListSavedDatasetsRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListSavedDatasetsResponse { + repeated feast.core.SavedDataset saved_datasets = 1; +} + +// ValidationReference + +message GetValidationReferenceRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message ListValidationReferencesRequest { + string project = 1; + bool allow_cache = 2; +} + +message ListValidationReferencesResponse { + repeated feast.core.ValidationReference validation_references = 1; +} diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 2eb2c27bcb..985c44b821 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -25,7 +25,10 @@ from pygments import formatters, highlight, lexers from feast import utils -from feast.constants import DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT +from feast.constants import ( + DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT, + DEFAULT_REGISTRY_SERVER_PORT, +) from feast.errors import FeastObjectNotFoundException, FeastProviderLoginError from feast.feature_view import FeatureView from feast.infra.contrib.grpc_server import get_grpc_server @@ -753,6 +756,22 @@ def serve_transformations_command(ctx: click.Context, port: int): store.serve_transformations(port) +@cli.command("serve_registry") +@click.option( + "--port", + "-p", + type=click.INT, + default=DEFAULT_REGISTRY_SERVER_PORT, + help="Specify a port for the server", +) +@click.pass_context +def serve_registry_command(ctx: click.Context, port: int): + """Start a registry server locally on a given port.""" + store = create_feature_store(ctx) + + store.serve_registry(port) + + @cli.command("validate") @click.option( "--feature-service", diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py index 574d79f416..c022ecba55 100644 --- a/sdk/python/feast/constants.py +++ b/sdk/python/feast/constants.py @@ -44,5 +44,8 @@ # Default FTS port DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT = 6569 +# Default 
registry server port +DEFAULT_REGISTRY_SERVER_PORT = 6570 + # Environment variable for feature server docker image tag DOCKER_IMAGE_TAG_ENV_NAME: str = "FEAST_SERVER_DOCKER_IMAGE_TAG" diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index d3f98f8032..4a53672b2e 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -2278,6 +2278,13 @@ def serve_ui( root_path=root_path, ) + @log_exceptions_and_usage + def serve_registry(self, port: int) -> None: + """Start registry server locally on a given port.""" + from feast import registry_server + + registry_server.start_server(self, port) + @log_exceptions_and_usage def serve_transformations(self, port: int) -> None: """Start the feature transformation server locally on a given port.""" diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 14b098bb12..8928a5800d 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -329,7 +329,9 @@ def list_feature_views( # request feature view operations @abstractmethod - def get_request_feature_view(self, name: str, project: str) -> RequestFeatureView: + def get_request_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> RequestFeatureView: """ Retrieves a request feature view. 
diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index 1a72cbb4a5..fc7be75e0d 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -528,8 +528,12 @@ def list_feature_views( ) return proto_registry_utils.list_feature_views(registry_proto, project) - def get_request_feature_view(self, name: str, project: str): - registry_proto = self._get_registry_proto(project=project, allow_cache=False) + def get_request_feature_view( + self, name: str, project: str, allow_cache: bool = False + ): + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) return proto_registry_utils.get_request_feature_view( registry_proto, name, project ) diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py new file mode 100644 index 0000000000..221715480e --- /dev/null +++ b/sdk/python/feast/registry_server.py @@ -0,0 +1,202 @@ +from concurrent import futures + +import grpc +from google.protobuf.empty_pb2 import Empty + +from feast import FeatureStore +from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc + + +class RegistryServer(RegistryServer_pb2_grpc.RegistryServerServicer): + def __init__(self, store: FeatureStore) -> None: + super().__init__() + self.proxied_registry = store.registry + + def GetEntity(self, request: RegistryServer_pb2.GetEntityRequest, context): + return self.proxied_registry.get_entity( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListEntities(self, request, context): + return RegistryServer_pb2.ListEntitiesResponse( + entities=[ + entity.to_proto() + for entity in self.proxied_registry.list_entities( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetDataSource(self, request: RegistryServer_pb2.GetDataSourceRequest, context): + return self.proxied_registry.get_data_source( + 
name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListDataSources(self, request, context): + return RegistryServer_pb2.ListDataSourcesResponse( + data_sources=[ + data_source.to_proto() + for data_source in self.proxied_registry.list_data_sources( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetFeatureView( + self, request: RegistryServer_pb2.GetFeatureViewRequest, context + ): + return self.proxied_registry.get_feature_view( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListFeatureViews(self, request, context): + return RegistryServer_pb2.ListFeatureViewsResponse( + feature_views=[ + feature_view.to_proto() + for feature_view in self.proxied_registry.list_feature_views( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetRequestFeatureView( + self, request: RegistryServer_pb2.GetRequestFeatureViewRequest, context + ): + return self.proxied_registry.get_request_feature_view( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListRequestFeatureViews(self, request, context): + return RegistryServer_pb2.ListRequestFeatureViewsResponse( + request_feature_views=[ + request_feature_view.to_proto() + for request_feature_view in self.proxied_registry.list_request_feature_views( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetStreamFeatureView( + self, request: RegistryServer_pb2.GetStreamFeatureViewRequest, context + ): + return self.proxied_registry.get_stream_feature_view( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListStreamFeatureViews(self, request, context): + return RegistryServer_pb2.ListStreamFeatureViewsResponse( + stream_feature_views=[ + stream_feature_view.to_proto() + for stream_feature_view in self.proxied_registry.list_stream_feature_views( + 
project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetOnDemandFeatureView( + self, request: RegistryServer_pb2.GetOnDemandFeatureViewRequest, context + ): + return self.proxied_registry.get_on_demand_feature_view( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListOnDemandFeatureViews(self, request, context): + return RegistryServer_pb2.ListOnDemandFeatureViewsResponse( + on_demand_feature_views=[ + on_demand_feature_view.to_proto() + for on_demand_feature_view in self.proxied_registry.list_on_demand_feature_views( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetFeatureService( + self, request: RegistryServer_pb2.GetFeatureServiceRequest, context + ): + return self.proxied_registry.get_feature_service( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListFeatureServices( + self, request: RegistryServer_pb2.ListFeatureServicesRequest, context + ): + return RegistryServer_pb2.ListFeatureServicesResponse( + feature_services=[ + feature_service.to_proto() + for feature_service in self.proxied_registry.list_feature_services( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetSavedDataset( + self, request: RegistryServer_pb2.GetSavedDatasetRequest, context + ): + return self.proxied_registry.get_saved_dataset( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListSavedDatasets( + self, request: RegistryServer_pb2.ListSavedDatasetsRequest, context + ): + return RegistryServer_pb2.ListSavedDatasetsResponse( + saved_datasets=[ + saved_dataset.to_proto() + for saved_dataset in self.proxied_registry.list_saved_datasets( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetValidationReference( + self, request: RegistryServer_pb2.GetValidationReferenceRequest, context + ): + return 
self.proxied_registry.get_validation_reference( + name=request.name, project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def ListValidationReferences( + self, request: RegistryServer_pb2.ListValidationReferencesRequest, context + ): + return RegistryServer_pb2.ListValidationReferencesResponse( + validation_references=[ + validation_reference.to_proto() + for validation_reference in self.proxied_registry.list_validation_references( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def ListProjectMetadata( + self, request: RegistryServer_pb2.ListProjectMetadataRequest, context + ): + return RegistryServer_pb2.ListProjectMetadataResponse( + project_metadata=[ + project_metadata.to_proto() + for project_metadata in self.proxied_registry.list_project_metadata( + project=request.project, allow_cache=request.allow_cache + ) + ] + ) + + def GetInfra(self, request: RegistryServer_pb2.GetInfraRequest, context): + return self.proxied_registry.get_infra( + project=request.project, allow_cache=request.allow_cache + ).to_proto() + + def Refresh(self, request, context): + self.proxied_registry.refresh(request.project) + return Empty() + + def Proto(self, request, context): + return self.proxied_registry.proto() + + +def start_server(store: FeatureStore, port: int): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + RegistryServer_pb2_grpc.add_RegistryServerServicer_to_server( + RegistryServer(store), server + ) + server.add_insecure_port(f"[::]:{port}") + server.start() + server.wait_for_termination() diff --git a/sdk/python/tests/unit/test_registry_server.py b/sdk/python/tests/unit/test_registry_server.py new file mode 100644 index 0000000000..734bbfe19b --- /dev/null +++ b/sdk/python/tests/unit/test_registry_server.py @@ -0,0 +1,60 @@ +import assertpy +import grpc_testing +import pytest +from google.protobuf.empty_pb2 import Empty + +from feast import Entity, FeatureStore +from feast.protos.feast.registry import 
RegistryServer_pb2 +from feast.registry_server import RegistryServer + + +def call_registry_server(server, method: str, request=None): + service = RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"] + rpc = server.invoke_unary_unary( + service.methods_by_name[method], (), request if request else Empty(), None + ) + + return rpc.termination() + + +@pytest.fixture +def registry_server(environment): + store: FeatureStore = environment.feature_store + + servicer = RegistryServer(store=store) + + return grpc_testing.server_from_dictionary( + {RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"]: servicer}, + grpc_testing.strict_real_time(), + ) + + +def test_registry_server_get_entity(environment, registry_server): + store: FeatureStore = environment.feature_store + entity = Entity(name="driver", join_keys=["driver_id"]) + store.apply(entity) + + expected = store.get_entity(entity.name) + + get_entity_request = RegistryServer_pb2.GetEntityRequest( + name=entity.name, project=store.project, allow_cache=False + ) + response, trailing_metadata, code, details = call_registry_server( + registry_server, "GetEntity", get_entity_request + ) + response_entity = Entity.from_proto(response) + + assertpy.assert_that(response_entity).is_equal_to(expected) + + +def test_registry_server_proto(environment, registry_server): + store: FeatureStore = environment.feature_store + entity = Entity(name="driver", join_keys=["driver_id"]) + store.apply(entity) + + expected = store.registry.proto() + response, trailing_metadata, code, details = call_registry_server( + registry_server, "Proto" + ) + + assertpy.assert_that(response).is_equal_to(expected) diff --git a/setup.py b/setup.py index 4905a7697d..29b8dc5a68 100644 --- a/setup.py +++ b/setup.py @@ -234,7 +234,7 @@ else: use_scm_version = None -PROTO_SUBDIRS = ["core", "serving", "types", "storage"] +PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] PYTHON_CODE_PREFIX = "sdk/python" From 
b4aed657bf830502344ede5c98841d0d77ebf4ef Mon Sep 17 00:00:00 2001 From: Harry Date: Wed, 7 Feb 2024 13:50:12 +0700 Subject: [PATCH 027/122] chore: Set upper bound for moto package (#3937) chore: set upper bound for moto package Signed-off-by: Hai Nguyen --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 29b8dc5a68..4fb80871b2 100644 --- a/setup.py +++ b/setup.py @@ -155,7 +155,7 @@ "grpcio-testing>=1.56.2,<2", "minio==7.1.0", "mock==2.0.0", - "moto", + "moto<5", "mypy>=0.981,<0.990", "avro==1.10.0", "fsspec<2023.10.0", From 49d2988a562c66b3949cf2368fe44ed41e767eab Mon Sep 17 00:00:00 2001 From: Dongwoo Park <40623259+Woo-Dong@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:39:29 +0900 Subject: [PATCH 028/122] fix: Trino as an OfflineStore Access Denied when BasicAuthenticaion (#3898) --- .../infra/offline_stores/contrib/trino_offline_store/trino.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index f662cda913..d4cfdb6632 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -40,7 +40,7 @@ class BasicAuthModel(FeastConfigBaseModel): username: StrictStr - password: SecretStr + password: StrictStr class KerberosAuthModel(FeastConfigBaseModel): From c16e5afcc5273b0c26b79dd4e233a28618ac490a Mon Sep 17 00:00:00 2001 From: TS <67011812+tsisodia10@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:54:41 -0500 Subject: [PATCH 029/122] fix: Typo Correction in Feast UI Readme (#3939) Modify the README to point to correct project list Signed-off-by: Twinkll Sisodia --- ui/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/README.md b/ui/README.md index e91a8741ec..a9ce5d3ec7 100644 --- a/ui/README.md +++ 
b/ui/README.md @@ -46,7 +46,7 @@ ReactDOM.render( ); ``` -When you start the React app, it will look for `project-list.json` to find a list of your projects. The JSON should looks something like this. +When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should looks something like this. ```json { From bdce99d8e4581b7c59558b91840f019a16194b41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20T=C3=B3th?= Date: Fri, 9 Feb 2024 23:44:47 +0100 Subject: [PATCH 030/122] docs: Add ScyllaDB as online store alternative (fixed DCO) (#3944) --- docs/SUMMARY.md | 1 + docs/reference/online-stores/README.md | 4 +- docs/reference/online-stores/scylladb.md | 94 ++++++++++++++++++++++++ 3 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 docs/reference/online-stores/scylladb.md diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index c80ded2adf..8affea898e 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -99,6 +99,7 @@ * [MySQL (contrib)](reference/online-stores/mysql.md) * [Rockset (contrib)](reference/online-stores/rockset.md) * [Hazelcast (contrib)](reference/online-stores/hazelcast.md) + * [ScyllaDB (contrib)](reference/online-stores/scylladb.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index f86e6f6a1d..d90bfcf163 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -54,4 +54,6 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli [hazelcast.md](hazelcast.md) {% endcontent-ref %} - +{% content-ref url="scylladb.md" %} +[scylladb.md](scylladb.md) +{% endcontent-ref %} diff --git a/docs/reference/online-stores/scylladb.md b/docs/reference/online-stores/scylladb.md new file mode 100644 index 0000000000..e28e810e21 --- 
/dev/null +++ b/docs/reference/online-stores/scylladb.md @@ -0,0 +1,94 @@ +# ScyllaDB Cloud online store + +## Description + +ScyllaDB is a low-latency and high-performance Cassandra-compatible (uses CQL) database. You can use the existing Cassandra connector to use ScyllaDB as an online store in Feast. + +The [ScyllaDB](https://www.scylladb.com/) online store provides support for materializing feature values into a ScyllaDB or [ScyllaDB Cloud](https://www.scylladb.com/product/scylla-cloud/) cluster for serving online features real-time. + +## Getting started + +Install Feast with Cassandra support: +```bash +pip install "feast[cassandra]" +``` + +Create a new Feast project: +```bash +feast init REPO_NAME -t cassandra +``` + +### Example (ScyllaDB) + +{% code title="feature_store.yaml" %} +```yaml +project: scylla_feature_repo +registry: data/registry.db +provider: local +online_store: + type: cassandra + hosts: + - 172.17.0.2 + keyspace: feast + username: scylla + password: password +``` +{% endcode %} + +### Example (ScyllaDB Cloud) + +{% code title="feature_store.yaml" %} +```yaml +project: scylla_feature_repo +registry: data/registry.db +provider: local +online_store: + type: cassandra + hosts: + - node-0.aws_us_east_1.xxxxxxxx.clusters.scylla.cloud + - node-1.aws_us_east_1.xxxxxxxx.clusters.scylla.cloud + - node-2.aws_us_east_1.xxxxxxxx.clusters.scylla.cloud + keyspace: feast + username: scylla + password: password +``` +{% endcode %} + + +The full set of configuration options is available in [CassandraOnlineStoreConfig](https://rtd.feast.dev/en/master/#feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStoreConfig). +For a full explanation of configuration options please look at file +`sdk/python/feast/infra/online_stores/contrib/cassandra_online_store/README.md`. + +Storage specifications can be found at `docs/specs/online_store_format.md`. 
+ +## Functionality Matrix + +The set of functionality supported by online stores is described in detail [here](overview.md#functionality). +Below is a matrix indicating which functionality is supported by the Cassandra plugin. + +| | Cassandra | +| :-------------------------------------------------------- | :-------- | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | yes | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | no | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | yes | +| collocated by feature service | no | +| collocated by entity key | no | + +To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). 
+ +## Resources + +* [Sample application with ScyllaDB](https://feature-store.scylladb.com/stable/) +* [ScyllaDB website](https://www.scylladb.com/) +* [ScyllaDB Cloud documentation](https://cloud.docs.scylladb.com/stable/) From 7d75fc525a7f2f46811d168ce71f91b5736ad788 Mon Sep 17 00:00:00 2001 From: Job Almekinders <55230856+job-almekinders@users.noreply.github.com> Date: Fri, 9 Feb 2024 23:45:22 +0100 Subject: [PATCH 031/122] fix: Add conn.commit() to Postgresonline_write_batch.online_write_batch (#3904) --- sdk/python/feast/infra/online_stores/contrib/postgres.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index a12e66f109..49f87ddb0a 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -99,6 +99,7 @@ def online_write_batch( cur_batch, page_size=batch_size, ) + conn.commit() if progress: progress(len(cur_batch)) From d3a2a45d9bc2b690a7aa784ec7b0411e91244dab Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 10 Feb 2024 02:45:38 +0400 Subject: [PATCH 032/122] fix: Transformation server doesn't generate files from proto (#3902) --- sdk/python/feast/infra/transformation_servers/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/transformation_servers/Dockerfile b/sdk/python/feast/infra/transformation_servers/Dockerfile index c072ed0160..41f272c757 100644 --- a/sdk/python/feast/infra/transformation_servers/Dockerfile +++ b/sdk/python/feast/infra/transformation_servers/Dockerfile @@ -15,7 +15,7 @@ COPY README.md README.md # Install dependencies -RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir -e '.[gcp,aws]' +RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir '.[gcp,aws]' # Start feature transformation server CMD [ "python", "app.py" ] From 
4e450ad3b1b6d2f66fd87e07805bb57772390142 Mon Sep 17 00:00:00 2001 From: Chester Date: Sat, 10 Feb 2024 21:00:09 +0800 Subject: [PATCH 033/122] chore: Bumping fastapi + starlette (#3938) --- Makefile | 2 +- sdk/python/feast/data_source.py | 19 +++++----- sdk/python/feast/feature_service.py | 2 +- sdk/python/feast/feature_view.py | 2 +- sdk/python/feast/importer.py | 3 +- .../infra/contrib/spark_kafka_processor.py | 11 +++++- .../feast/infra/contrib/stream_processor.py | 11 +++--- .../athena_offline_store/athena_source.py | 6 +-- .../athena_offline_store/tests/data_source.py | 2 +- .../mssql_offline_store/tests/data_source.py | 8 ++-- .../tests/data_source.py | 2 +- .../spark_offline_store/tests/data_source.py | 4 +- .../feast/infra/offline_stores/file_source.py | 2 +- .../infra/offline_stores/offline_store.py | 37 +++++++------------ .../feast/infra/offline_stores/redshift.py | 6 +-- .../infra/offline_stores/snowflake_source.py | 4 +- .../feast/infra/online_stores/dynamodb.py | 4 +- .../feast/infra/passthrough_provider.py | 2 +- sdk/python/feast/infra/provider.py | 2 +- .../feast/infra/registry/base_registry.py | 2 + .../feast/infra/registry/registry_store.py | 4 +- sdk/python/feast/infra/registry/snowflake.py | 2 +- sdk/python/feast/infra/utils/aws_utils.py | 2 +- sdk/python/feast/infra/utils/hbase_utils.py | 8 ++-- .../infra/utils/snowflake/snowflake_utils.py | 6 ++- sdk/python/feast/type_map.py | 9 +++-- .../requirements/py3.10-ci-requirements.txt | 26 ++++--------- .../requirements/py3.10-requirements.txt | 12 ++---- .../requirements/py3.8-ci-requirements.txt | 26 ++++--------- .../requirements/py3.8-requirements.txt | 12 ++---- .../requirements/py3.9-ci-requirements.txt | 26 ++++--------- .../requirements/py3.9-requirements.txt | 12 ++---- sdk/python/tests/data/data_creator.py | 2 +- sdk/python/tests/foo_provider.py | 8 ++-- .../universal/data_source_creator.py | 4 +- .../universal/data_sources/bigquery.py | 2 +- .../universal/data_sources/file.py | 6 +-- 
.../universal/data_sources/redshift.py | 2 +- .../universal/data_sources/snowflake.py | 2 +- .../universal/online_store_creator.py | 4 +- .../offline_stores/test_offline_store.py | 15 +++++--- setup.py | 4 +- 42 files changed, 147 insertions(+), 178 deletions(-) diff --git a/Makefile b/Makefile index 4b85c0e448..6736e64078 100644 --- a/Makefile +++ b/Makefile @@ -310,7 +310,7 @@ format-python: cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests lint-python: - cd ${ROOT_DIR}/sdk/python; python -m mypy + cd ${ROOT_DIR}/sdk/python; python -m mypy --exclude=/tests/ --follow-imports=skip feast cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/ cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index b7ce19aad9..3421fd5d30 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import enum import warnings from abc import ABC, abstractmethod @@ -485,12 +484,12 @@ def to_proto(self) -> DataSourceProto: return data_source_proto def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: @@ -534,12 +533,12 @@ def __init__( self.schema = schema def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError def __eq__(self, other): if not isinstance(other, RequestSource): @@ -610,12 +609,12 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: @typechecked class KinesisSource(DataSource): def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def from_proto(data_source: DataSourceProto): @@ -639,7 +638,7 @@ def from_proto(data_source: DataSourceProto): @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: - pass + raise NotImplementedError def get_table_query_string(self) -> str: raise NotImplementedError @@ -772,12 +771,12 @@ def __hash__(self): return super().__hash__() def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def from_proto(data_source: DataSourceProto): diff --git a/sdk/python/feast/feature_service.py b/sdk/python/feast/feature_service.py index c3037a55da..7ec923205a 100644 --- a/sdk/python/feast/feature_service.py +++ b/sdk/python/feast/feature_service.py @@ -56,7 +56,7 @@ def 
__init__( *, name: str, features: List[Union[FeatureView, OnDemandFeatureView]], - tags: Dict[str, str] = None, + tags: Optional[Dict[str, str]] = None, description: str = "", owner: str = "", logging_config: Optional[LoggingConfig] = None, diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 67f9662d31..f87ae7ab13 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -101,7 +101,7 @@ def __init__( name: str, source: DataSource, schema: Optional[List[Field]] = None, - entities: List[Entity] = None, + entities: Optional[List[Entity]] = None, ttl: Optional[timedelta] = timedelta(days=0), online: bool = True, description: str = "", diff --git a/sdk/python/feast/importer.py b/sdk/python/feast/importer.py index bbd592101a..d1d7d62901 100644 --- a/sdk/python/feast/importer.py +++ b/sdk/python/feast/importer.py @@ -1,4 +1,5 @@ import importlib +from typing import Optional from feast.errors import ( FeastClassImportError, @@ -7,7 +8,7 @@ ) -def import_class(module_name: str, class_name: str, class_type: str = None): +def import_class(module_name: str, class_name: str, class_type: Optional[str] = None): """ Dynamically loads and returns a class from a module. 
diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index ea55d89988..bac1c28b06 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -5,6 +5,7 @@ from pyspark.sql import DataFrame, SparkSession from pyspark.sql.avro.functions import from_avro from pyspark.sql.functions import col, from_json +from pyspark.sql.streaming import StreamingQuery from feast.data_format import AvroFormat, JsonFormat from feast.data_source import KafkaSource, PushMode @@ -63,7 +64,13 @@ def __init__( self.join_keys = [fs.get_entity(entity).join_key for entity in sfv.entities] super().__init__(fs=fs, sfv=sfv, data_source=sfv.stream_source) - def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: + # Type hinting for data_source type. + # data_source type has been checked to be an instance of KafkaSource. + self.data_source: KafkaSource = self.data_source # type: ignore + + def ingest_stream_feature_view( + self, to: PushMode = PushMode.ONLINE + ) -> StreamingQuery: ingested_stream_df = self._ingest_stream_data() transformed_df = self._construct_transformation_plan(ingested_stream_df) online_store_query = self._write_stream_data(transformed_df, to) @@ -122,7 +129,7 @@ def _ingest_stream_data(self) -> StreamTable: def _construct_transformation_plan(self, df: StreamTable) -> StreamTable: return self.sfv.udf.__call__(df) if self.sfv.udf else df - def _write_stream_data(self, df: StreamTable, to: PushMode): + def _write_stream_data(self, df: StreamTable, to: PushMode) -> StreamingQuery: # Validation occurs at the fs.write_to_online_store() phase against the stream feature view schema. 
def batch_write(row: DataFrame, batch_id: int): rows: pd.DataFrame = row.toPandas() diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index 24817c82ea..df4e144f8c 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -3,6 +3,7 @@ from typing import TYPE_CHECKING, Optional from pyspark.sql import DataFrame +from typing_extensions import TypeAlias from feast.data_source import DataSource, PushMode from feast.importer import import_class @@ -17,7 +18,7 @@ } # TODO: support more types other than just Spark. -StreamTable = DataFrame +StreamTable: TypeAlias = DataFrame class ProcessorConfig(FeastConfigBaseModel): @@ -54,13 +55,13 @@ def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: Ingests data from the stream source attached to the stream feature view; transforms the data and then persists it to the online store and/or offline store, depending on the 'to' parameter. """ - pass + raise NotImplementedError def _ingest_stream_data(self) -> StreamTable: """ Ingests data into a StreamTable. """ - pass + raise NotImplementedError def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: """ @@ -68,14 +69,14 @@ def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: evaluation, the StreamTable will not be materialized until it is actually evaluated. For example: df.collect() in spark or tbl.execute() in Flink. """ - pass + raise NotImplementedError def _write_stream_data(self, table: StreamTable, to: PushMode) -> None: """ Launches a job to persist stream data to the online store and/or offline store, depending on the 'to' parameter, and returns a handle for the job. 
""" - pass + raise NotImplementedError def get_stream_processor_object( diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py index 8e9e3893f3..0aca42cd68 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py @@ -297,9 +297,9 @@ class SavedDatasetAthenaStorage(SavedDatasetStorage): def __init__( self, table_ref: str, - query: str = None, - database: str = None, - data_source: str = None, + query: Optional[str] = None, + database: Optional[str] = None, + data_source: Optional[str] = None, ): self.athena_options = AthenaOptions( table=table_ref, query=query, database=database, data_source=data_source diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py index 384ab69e81..f68e109d6c 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: table_name = destination_name diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 9b751d98ef..2604cf7c18 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -1,4 +1,4 @@ -from typing import 
Dict, List +from typing import Dict, List, Optional import pandas as pd import pytest @@ -66,7 +66,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: # Make sure the field mapping is correct and convert the datetime datasources. @@ -99,10 +99,10 @@ def create_data_source( ) def create_saved_dataset_destination(self) -> SavedDatasetStorage: - pass + raise NotImplementedError def get_prefixed_table_name(self, destination_name: str) -> str: return f"{self.project_name}_{destination_name}" def teardown(self): - pass + raise NotImplementedError diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index f447950132..224fcea30f 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -85,7 +85,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py index 71c07b20c2..7b4fda3b5f 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py @@ -2,7 +2,7 @@ import shutil import tempfile import uuid -from typing import Dict, List +from typing import Dict, List, Optional 
import pandas as pd from pyspark import SparkConf @@ -70,7 +70,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: if timestamp_field in df: diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index ac824b359f..887b410079 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -183,7 +183,7 @@ def create_filesystem_and_path( return None, path def get_table_query_string(self) -> str: - pass + raise NotImplementedError class FileOptions: diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 6141e3c435..30135feccb 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import warnings -from abc import ABC, abstractmethod +from abc import ABC from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union @@ -150,9 +150,8 @@ def to_sql(self) -> str: """ Return RetrievalJob generated SQL statement if applicable. """ - pass + raise NotImplementedError - @abstractmethod def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: """ Synchronously executes the underlying query and returns the result as a pandas dataframe. @@ -162,9 +161,8 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Does not handle on demand transformations or dataset validation. For either of those, `to_df` should be used. 
""" - pass + raise NotImplementedError - @abstractmethod def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """ Synchronously executes the underlying query and returns the result as an arrow table. @@ -174,21 +172,18 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: Does not handle on demand transformations or dataset validation. For either of those, `to_arrow` should be used. """ - pass + raise NotImplementedError @property - @abstractmethod def full_feature_names(self) -> bool: """Returns True if full feature names should be applied to the results of the query.""" - pass + raise NotImplementedError @property - @abstractmethod def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" - pass + raise NotImplementedError - @abstractmethod def persist( self, storage: SavedDatasetStorage, @@ -204,13 +199,12 @@ def persist( allow_overwrite: If True, a pre-existing location (e.g. table or file) can be overwritten. Currently not all individual offline store implementations make use of this parameter. """ - pass + raise NotImplementedError @property - @abstractmethod def metadata(self) -> Optional[RetrievalMetadata]: """Returns metadata about the retrieval job.""" - pass + raise NotImplementedError def supports_remote_storage_export(self) -> bool: """Returns True if the RetrievalJob supports `to_remote_storage`.""" @@ -226,7 +220,7 @@ def to_remote_storage(self) -> List[str]: Returns: A list of parquet file paths in remote storage. """ - raise NotImplementedError() + raise NotImplementedError class OfflineStore(ABC): @@ -239,7 +233,6 @@ class OfflineStore(ABC): """ @staticmethod - @abstractmethod def pull_latest_from_table_or_query( config: RepoConfig, data_source: DataSource, @@ -270,10 +263,9 @@ def pull_latest_from_table_or_query( Returns: A RetrievalJob that can be executed to get the entity rows. 
""" - pass + raise NotImplementedError @staticmethod - @abstractmethod def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], @@ -302,10 +294,9 @@ def get_historical_features( Returns: A RetrievalJob that can be executed to get the features. """ - pass + raise NotImplementedError @staticmethod - @abstractmethod def pull_all_from_table_or_query( config: RepoConfig, data_source: DataSource, @@ -334,7 +325,7 @@ def pull_all_from_table_or_query( Returns: A RetrievalJob that can be executed to get the entity rows. """ - pass + raise NotImplementedError @staticmethod def write_logged_features( @@ -358,7 +349,7 @@ def write_logged_features( logging_config: A LoggingConfig object that determines where the logs will be written. registry: The registry for the current feature store. """ - raise NotImplementedError() + raise NotImplementedError @staticmethod def offline_write_batch( @@ -377,4 +368,4 @@ def offline_write_batch( progress: Function to be called once a portion of the data has been written, used to show progress. 
""" - raise NotImplementedError() + raise NotImplementedError diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 837cf49655..6034bf5ac7 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -51,13 +51,13 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): type: Literal["redshift"] = "redshift" """ Offline store type selector""" - cluster_id: Optional[StrictStr] + cluster_id: Optional[StrictStr] = None """ Redshift cluster identifier, for provisioned clusters """ - user: Optional[StrictStr] + user: Optional[StrictStr] = None """ Redshift user name, only required for provisioned clusters """ - workgroup: Optional[StrictStr] + workgroup: Optional[StrictStr] = None """ Redshift workgroup identifier, for serverless """ region: StrictStr diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 0cbf82dd1c..e29197c68d 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,5 +1,5 @@ import warnings -from typing import Callable, Dict, Iterable, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple from typeguard import typechecked @@ -223,7 +223,7 @@ def get_table_column_names_and_types( query = f"SELECT * FROM {self.get_table_query_string()} LIMIT 5" cursor = execute_snowflake_statement(conn, query) - metadata = [ + metadata: List[Dict[str, Any]] = [ { "column_name": column.name, "type_code": column.type_code, diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 525978e736..a1eef16f40 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -288,12 +288,12 @@ def _get_dynamodb_resource(self, region: str, 
endpoint_url: Optional[str] = None ) return self._dynamodb_resource - def _sort_dynamodb_response(self, responses: list, order: list): + def _sort_dynamodb_response(self, responses: list, order: list) -> Any: """DynamoDB Batch Get Item doesn't return items in a particular order.""" # Assign an index to order order_with_index = {value: idx for idx, value in enumerate(order)} # Sort table responses by index - table_responses_ordered = [ + table_responses_ordered: Any = [ (order_with_index[tbl_res["entity_id"]], tbl_res) for tbl_res in responses ] table_responses_ordered = sorted( diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 28b10c1259..811abe106c 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -180,7 +180,7 @@ def online_read( config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List: set_usage_attribute("provider", self.__class__.__name__) result = [] diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 82879b264a..2a9670cace 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -211,7 +211,7 @@ def online_read( config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: """ Reads features values for the given entity keys. 
diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 8928a5800d..f89b079478 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -503,7 +503,9 @@ def list_validation_references( Returns: List of request feature views """ + raise NotImplementedError + @abstractmethod def list_project_metadata( self, project: str, allow_cache: bool = False ) -> List[ProjectMetadata]: diff --git a/sdk/python/feast/infra/registry/registry_store.py b/sdk/python/feast/infra/registry/registry_store.py index c42a55cd9d..5151fd74b2 100644 --- a/sdk/python/feast/infra/registry/registry_store.py +++ b/sdk/python/feast/infra/registry/registry_store.py @@ -17,7 +17,7 @@ def get_registry_proto(self) -> RegistryProto: Returns: Returns either the registry proto stored at the registry path, or an empty registry proto. """ - pass + raise NotImplementedError @abstractmethod def update_registry_proto(self, registry_proto: RegistryProto): @@ -40,7 +40,7 @@ def teardown(self): class NoopRegistryStore(RegistryStore): def get_registry_proto(self) -> RegistryProto: - pass + return RegistryProto() def update_registry_proto(self, registry_proto: RegistryProto): pass diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index 56c7bc1f65..c1ebf13d6b 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -418,7 +418,7 @@ def _delete_object( """ cursor = execute_snowflake_statement(conn, query) - if cursor.rowcount < 1 and not_found_exception: + if cursor.rowcount < 1 and not_found_exception: # type: ignore raise not_found_exception(name, project) self._set_last_updated_metadata(datetime.utcnow(), project) diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index ef83c6d1c6..c3604ee41f 100644 --- 
a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -816,7 +816,7 @@ def execute_athena_query( database: str, workgroup: str, query: str, - temp_table: str = None, + temp_table: Optional[str] = None, ) -> str: """Execute athena statement synchronously. Waits for the query to finish. diff --git a/sdk/python/feast/infra/utils/hbase_utils.py b/sdk/python/feast/infra/utils/hbase_utils.py index d44f93f161..72afda2ef3 100644 --- a/sdk/python/feast/infra/utils/hbase_utils.py +++ b/sdk/python/feast/infra/utils/hbase_utils.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Optional from happybase import ConnectionPool @@ -38,9 +38,9 @@ class HBaseConnector: def __init__( self, - pool: ConnectionPool = None, - host: str = None, - port: int = None, + pool: Optional[ConnectionPool] = None, + host: Optional[str] = None, + port: Optional[int] = None, connection_pool_size: int = 4, ): if pool is None: diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index a4cda89a6f..8eb5177ac2 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -43,7 +43,11 @@ class GetSnowflakeConnection: - def __init__(self, config: str, autocommit=True): + def __init__( + self, + config: str, + autocommit=True, + ): self.config = config self.autocommit = autocommit diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index e51e1e743b..ad3e273d37 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -51,7 +51,7 @@ import pyarrow # null timestamps get converted to -9223372036854775808 -NULL_TIMESTAMP_INT_VALUE = np.datetime64("NaT").astype(int) +NULL_TIMESTAMP_INT_VALUE: int = np.datetime64("NaT").astype(int) def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any: @@ -114,7 +114,10 @@ def 
feast_value_type_to_pandas_type(value_type: ValueType) -> Any: def python_type_to_feast_value_type( - name: str, value: Any = None, recurse: bool = True, type_name: Optional[str] = None + name: str, + value: Optional[Any] = None, + recurse: bool = True, + type_name: Optional[str] = None, ) -> ValueType: """ Finds the equivalent Feast Value Type for a Python value. Both native @@ -321,7 +324,7 @@ def _python_datetime_to_int_timestamp( elif isinstance(value, Timestamp): int_timestamps.append(int(value.ToSeconds())) elif isinstance(value, np.datetime64): - int_timestamps.append(value.astype("datetime64[s]").astype(np.int_)) + int_timestamps.append(value.astype("datetime64[s]").astype(np.int_)) # type: ignore[attr-defined] elif isinstance(value, type(np.nan)): int_timestamps.append(NULL_TIMESTAMP_INT_VALUE) else: diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 740356907d..9435a68deb 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -121,9 +121,7 @@ comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -173,7 +171,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -226,9 +224,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -462,7 +458,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -801,9 +797,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx 
snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -829,14 +823,12 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 # via sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -961,9 +953,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 18486d7fa9..5d5d451e14 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -175,12 +175,10 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -210,9 +208,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 3bda9e72f9..808a58e11b 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -127,9 +127,7 @@ 
comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -178,7 +176,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -231,9 +229,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -478,7 +474,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -824,9 +820,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -846,14 +840,12 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 # via sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -981,9 +973,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index c180c50c81..163fa4c9a8 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 
+fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -180,12 +180,10 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -216,9 +214,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6989d5b4cc..f9d7ac3fb9 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -121,9 +121,7 @@ comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -173,7 +171,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -226,9 +224,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -469,7 +465,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -810,9 +806,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -838,14 +832,12 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 # via 
sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -973,9 +965,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 3b6f88b4e2..4d9b8f107d 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via @@ -175,12 +175,10 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -211,9 +209,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 8d5b1979fa..1fc66aee84 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -9,7 +9,7 @@ def create_basic_driver_dataset( entity_type: FeastType = Int32, - feature_dtype: str = None, + feature_dtype: Optional[str] = None, feature_is_list: bool = False, list_has_empty_list: bool = False, ) -> pd.DataFrame: diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index d27e2645d4..ba256a3813 100644 --- 
a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -71,16 +71,16 @@ def get_historical_features( project: str, full_feature_names: bool = False, ) -> RetrievalJob: - pass + return RetrievalJob() def online_read( self, config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - pass + return [] def retrieve_saved_dataset(self, config: RepoConfig, dataset: SavedDataset): pass @@ -102,4 +102,4 @@ def retrieve_feature_service_logs( config: RepoConfig, registry: BaseRegistry, ) -> RetrievalJob: - pass + return RetrievalJob() diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index b36af0db47..d64463606f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -20,7 +20,7 @@ def create_data_source( destination_name: str, event_timestamp_column="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = None, ) -> DataSource: """ @@ -53,7 +53,7 @@ def create_saved_dataset_destination(self) -> SavedDatasetStorage: ... 
def create_logged_features_destination(self) -> LoggingDestination: - pass + raise NotImplementedError @abstractmethod def teardown(self): diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 384037eef1..215d19ba7f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -66,7 +66,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 124dd4c88d..3263785683 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -41,7 +41,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -96,7 +96,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -171,7 +171,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: filename = f"{destination_name}.parquet" 
port = self.minio.get_exposed_port("9000") diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index dfe8e3d33b..e6f20d6125 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index c14780da97..1414291a18 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py index c3872ea697..10a8143739 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py @@ -8,7 +8,7 @@ def __init__(self, project_name: str, **kwargs): self.project_name = project_name def create_online_store(self) -> FeastConfigBaseModel: - ... + raise NotImplementedError def teardown(self): - ... 
+ raise NotImplementedError diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index ef0cce0470..220bdba0da 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -39,6 +39,9 @@ class MockRetrievalJob(RetrievalJob): + def to_sql(self) -> str: + return "" + def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: """ Synchronously executes the underlying query and returns the result as a pandas dataframe. @@ -46,7 +49,7 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Does not handle on demand transformations or dataset validation. For either of those, `to_df` should be used. """ - pass + return pd.DataFrame() def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """ @@ -55,17 +58,17 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: Does not handle on demand transformations or dataset validation. For either of those, `to_arrow` should be used. """ - pass + return pyarrow.Table() @property def full_feature_names(self) -> bool: """Returns True if full feature names should be applied to the results of the query.""" - pass + return False @property def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" - pass + return [] def persist( self, @@ -87,7 +90,7 @@ def persist( @property def metadata(self) -> Optional[RetrievalMetadata]: """Returns metadata about the retrieval job.""" - pass + raise NotImplementedError # Since RetreivalJob are not really tested for subclasses we add some tests here. 
@@ -208,7 +211,7 @@ def retrieval_job(request, environment): def test_to_sql(): - assert MockRetrievalJob().to_sql() is None + assert MockRetrievalJob().to_sql() == "" @pytest.mark.parametrize("timeout", (None, 30)) diff --git a/setup.py b/setup.py index 4fb80871b2..81ae63a7a4 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ "toml>=0.10.0,<1", "tqdm>=4,<5", "typeguard==2.13.3", - "fastapi>=0.68.0,<0.100", + "fastapi>=0.68.0", "uvicorn[standard]>=0.14.0,<1", "gunicorn", "dask>=2021.1.0", @@ -156,7 +156,7 @@ "minio==7.1.0", "mock==2.0.0", "moto<5", - "mypy>=0.981,<0.990", + "mypy>=1.4.1", "avro==1.10.0", "fsspec<2023.10.0", "urllib3>=1.25.4,<3", From dbb59ba0932e5962b34b14e7218a1ddae86a9686 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 13 Feb 2024 23:45:28 +0400 Subject: [PATCH 034/122] fix: Rewrite Spark materialization engine to use mapInPandas (#3936) rewrite spark materilization engine to use mapInPandas Signed-off-by: tokoko --- .../spark/spark_materialization_engine.py | 67 ++++++++++--------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py index ed4388aeb3..798d3a8e6f 100644 --- a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py @@ -3,6 +3,7 @@ from typing import Callable, List, Literal, Optional, Sequence, Union, cast import dill +import pandas import pandas as pd import pyarrow from tqdm import tqdm @@ -178,9 +179,9 @@ def _materialize_one( self.repo_config.batch_engine.partitions ) - spark_df.foreachPartition( - lambda x: _process_by_partition(x, spark_serialized_artifacts) - ) + spark_df.mapInPandas( + lambda x: _map_by_partition(x, spark_serialized_artifacts), "status int" + ).count() # dummy action to force evaluation return 
SparkMaterializationJob( job_id=job_id, status=MaterializationJobStatus.SUCCEEDED @@ -225,38 +226,40 @@ def unserialize(self): return feature_view, online_store, repo_config -def _process_by_partition(rows, spark_serialized_artifacts: _SparkSerializedArtifacts): - """Load pandas df to online store""" - - # convert to pyarrow table - dicts = [] - for row in rows: - dicts.append(row.asDict()) +def _map_by_partition(iterator, spark_serialized_artifacts: _SparkSerializedArtifacts): + for pdf in iterator: + if pdf.shape[0] == 0: + print("Skipping") + return - df = pd.DataFrame.from_records(dicts) - if df.shape[0] == 0: - print("Skipping") - return + table = pyarrow.Table.from_pandas(pdf) - table = pyarrow.Table.from_pandas(df) + ( + feature_view, + online_store, + repo_config, + ) = spark_serialized_artifacts.unserialize() + + if feature_view.batch_source.field_mapping is not None: + table = _run_pyarrow_field_mapping( + table, feature_view.batch_source.field_mapping + ) - # unserialize artifacts - feature_view, online_store, repo_config = spark_serialized_artifacts.unserialize() + join_key_to_value_type = { + entity.name: entity.dtype.to_value_type() + for entity in feature_view.entity_columns + } - if feature_view.batch_source.field_mapping is not None: - table = _run_pyarrow_field_mapping( - table, feature_view.batch_source.field_mapping + rows_to_write = _convert_arrow_to_proto( + table, feature_view, join_key_to_value_type + ) + online_store.online_write_batch( + repo_config, + feature_view, + rows_to_write, + lambda x: None, ) - join_key_to_value_type = { - entity.name: entity.dtype.to_value_type() - for entity in feature_view.entity_columns - } - - rows_to_write = _convert_arrow_to_proto(table, feature_view, join_key_to_value_type) - online_store.online_write_batch( - repo_config, - feature_view, - rows_to_write, - lambda x: None, - ) + yield pd.DataFrame( + [pd.Series(range(1, 2))] + ) # dummy result because mapInPandas needs to return something From 
5c9f592890da7c4b857191050c1dacd0b39f78a0 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Wed, 14 Feb 2024 17:52:47 -0500 Subject: [PATCH 035/122] chore: Loosen fsspec requirements to allow recent releases (#3922) * chore: Loosen fsspec requirements to allow recent releases (I'm not sure the project has a super consistent pattern for when to specify a maximum version, but was going for the smallest possible change.) Signed-off-by: Chris Burroughs * drop redundant fsspec now that this is in another extra Signed-off-by: Chris Burroughs * post rebase regen Signed-off-by: Chris Burroughs --------- Signed-off-by: Chris Burroughs --- .../requirements/py3.10-ci-requirements.txt | 266 +++++++++--------- .../requirements/py3.10-requirements.txt | 82 +++--- .../requirements/py3.8-ci-requirements.txt | 239 ++++++++-------- .../requirements/py3.8-requirements.txt | 80 +++--- .../requirements/py3.9-ci-requirements.txt | 265 +++++++++-------- .../requirements/py3.9-requirements.txt | 82 +++--- setup.py | 5 +- 7 files changed, 510 insertions(+), 509 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 9435a68deb..ffb4662eb1 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -4,11 +4,12 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt # -alabaster==0.7.13 + +alabaster==0.7.16 # via sphinx altair==4.2.0 # via great-expectations -anyio==4.0.0 +anyio==4.2.0 # via # httpx # jupyter-server @@ -32,14 +33,14 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing avro==1.10.0 # via feast (setup.py) -azure-core==1.29.5 +azure-core==1.30.0 # via # azure-identity # azure-storage-blob @@ -47,21 +48,21 @@ azure-identity==1.15.0 # via feast (setup.py) azure-storage-blob==12.19.0 # via feast (setup.py) 
-babel==2.13.1 +babel==2.14.0 # via # jupyterlab-server # sphinx -beautifulsoup4==4.12.2 +beautifulsoup4==4.12.3 # via nbconvert black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.29.2 +boto3==1.34.42 # via # feast (setup.py) # moto -botocore==1.32.2 +botocore==1.34.42 # via # boto3 # moto @@ -74,13 +75,13 @@ build==1.0.3 # pip-tools bytewax==0.15.1 # via feast (setup.py) -cachecontrol==0.13.1 +cachecontrol==0.14.0 # via firebase-admin cachetools==5.3.2 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.29.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -116,13 +117,13 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.0 +comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.3.2 +coverage[toml]==7.4.1 # via pytest-cov -cryptography==41.0.6 +cryptography==41.0.7 # via # azure-identity # azure-storage-blob @@ -135,11 +136,11 @@ cryptography==41.0.6 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2023.11.0 +dask==2024.2.0 # via feast (setup.py) -db-dtypes==1.1.1 +db-dtypes==1.2.0 # via google-cloud-bigquery -debugpy==1.8.0 +debugpy==1.8.1 # via ipykernel decorator==5.1.1 # via ipython @@ -147,14 +148,14 @@ defusedxml==0.7.1 # via nbconvert deprecation==2.1.0 # via testcontainers -dill==0.3.7 +dill==0.3.8 # via # bytewax # feast (setup.py) # multiprocess -distlib==0.3.7 +distlib==0.3.8 # via virtualenv -docker==6.1.3 +docker==7.0.0 # via # feast (setup.py) # testcontainers @@ -162,7 +163,7 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via # anyio # ipython @@ -171,13 +172,13 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.109.1 +fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro -fastjsonschema==2.19.0 +fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 # via @@ -191,7 +192,7 
@@ flake8==6.0.0 # via feast (setup.py) fqdn==1.5.1 # via jsonschema -fsspec==2023.9.2 +fsspec==2023.12.2 # via # dask # feast (setup.py) @@ -199,7 +200,7 @@ geojson==2.5.0 # via rockset geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.14.0 +google-api-core[grpc]==2.17.1 # via # feast (setup.py) # firebase-admin @@ -211,9 +212,9 @@ google-api-core[grpc]==2.14.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.108.0 +google-api-python-client==2.118.0 # via firebase-admin -google-auth==2.23.4 +google-auth==2.27.0 # via # google-api-core # google-api-python-client @@ -221,26 +222,26 @@ google-auth==2.23.4 # google-cloud-core # google-cloud-storage # kubernetes -google-auth-httplib2==0.1.1 +google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.22.0 +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) -google-cloud-bigtable==2.21.0 +google-cloud-bigtable==2.23.0 # via feast (setup.py) -google-cloud-core==2.3.3 +google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.18.0 +google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.13.1 +google-cloud-firestore==2.14.0 # via firebase-admin -google-cloud-storage==2.13.0 +google-cloud-storage==2.14.0 # via # feast (setup.py) # firebase-admin @@ -248,11 +249,11 @@ google-crc32c==1.5.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 +google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.61.0 +googleapis-common-protos[grpc]==1.62.0 # via # feast (setup.py) # google-api-core @@ -260,11 +261,11 @@ googleapis-common-protos[grpc]==1.61.0 # grpcio-status great-expectations==0.15.50 # via feast (setup.py) 
-greenlet==3.0.1 +greenlet==3.0.3 # via sqlalchemy -grpc-google-iam-v1==0.12.7 +grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # google-api-core @@ -276,15 +277,15 @@ grpcio==1.59.2 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast (setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-status==1.59.2 +grpcio-status==1.60.1 # via google-api-core -grpcio-testing==1.59.2 +grpcio-testing==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -296,9 +297,9 @@ happybase==1.2.0 # via feast (setup.py) hazelcast-python-client==5.3.0 # via feast (setup.py) -hiredis==2.2.3 +hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httplib2==0.22.0 # via @@ -306,11 +307,13 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.25.1 - # via feast (setup.py) -identify==2.5.31 +httpx==0.26.0 + # via + # feast (setup.py) + # jupyterlab +identify==2.5.34 # via pre-commit -idna==3.4 +idna==3.6 # via # anyio # httpx @@ -319,7 +322,7 @@ idna==3.4 # snowflake-connector-python imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # dask # feast (setup.py) @@ -328,20 +331,20 @@ importlib-resources==6.1.1 # via feast (setup.py) iniconfig==2.0.0 # via pytest -ipykernel==6.26.0 +ipykernel==6.29.2 # via jupyterlab -ipython==8.17.2 +ipython==8.21.0 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.1 +ipywidgets==8.1.2 # via great-expectations isodate==0.6.1 # via azure-storage-blob isoduration==20.11.0 # via jsonschema -isort==5.12.0 +isort==5.13.2 # via feast (setup.py) jedi==0.19.1 # via ipython @@ -368,7 +371,7 @@ jsonpointer==2.4 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.20.0 
+jsonschema[format-nongpl]==4.21.1 # via # altair # feast (setup.py) @@ -376,14 +379,14 @@ jsonschema[format-nongpl]==4.20.0 # jupyter-events # jupyterlab-server # nbformat -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema jupyter-client==8.6.0 # via # ipykernel # jupyter-server # nbclient -jupyter-core==5.5.0 +jupyter-core==5.7.1 # via # ipykernel # jupyter-client @@ -396,24 +399,24 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.2 # via jupyterlab -jupyter-server==2.11.2 +jupyter-server==2.12.5 # via # jupyter-lsp # jupyterlab # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.4.4 +jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.0.11 +jupyterlab==4.1.1 # via notebook -jupyterlab-pygments==0.2.2 +jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.1 +jupyterlab-server==2.25.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.9 +jupyterlab-widgets==3.0.10 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -421,12 +424,12 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations -markupsafe==2.1.3 +markupsafe==2.1.5 # via # jinja2 # nbconvert # werkzeug -marshmallow==3.20.1 +marshmallow==3.20.2 # via great-expectations matplotlib-inline==0.1.6 # via @@ -440,23 +443,23 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==4.2.9 +moto==4.2.14 # via feast (setup.py) -msal==1.25.0 +msal==1.26.0 # via # azure-identity # msal-extensions -msal-extensions==1.0.0 +msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via cachecontrol -multiprocess==0.70.15 +multiprocess==0.70.16 # via bytewax mypy==1.8.0 # via @@ -470,7 +473,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.11.0 +nbconvert==7.16.0 # via jupyter-server nbformat==5.9.2 # via @@ -478,11 
+481,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.8 +nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.0.6 +notebook==7.1.0 # via great-expectations notebook-shim==0.2.3 # via @@ -500,7 +503,7 @@ numpy==1.24.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -overrides==7.4.0 +overrides==7.7.0 # via jupyter-server packaging==23.2 # via @@ -517,6 +520,7 @@ packaging==23.2 # jupyterlab # jupyterlab-server # marshmallow + # msal-extensions # nbconvert # pytest # snowflake-connector-python @@ -532,17 +536,17 @@ pandas==1.5.3 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 +pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi partd==1.4.1 # via dask -pathspec==0.11.2 +pathspec==0.12.1 # via black pbr==6.0.0 # via mock -pexpect==4.8.0 +pexpect==4.9.0 # via ipython pip-tools==7.3.0 # via feast (setup.py) @@ -552,7 +556,7 @@ platformdirs==3.11.0 # jupyter-core # snowflake-connector-python # virtualenv -pluggy==1.3.0 +pluggy==1.4.0 # via pytest ply==3.11 # via thriftpy2 @@ -560,11 +564,11 @@ portalocker==2.8.2 # via msal-extensions pre-commit==3.3.1 # via feast (setup.py) -prometheus-client==0.18.0 +prometheus-client==0.20.0 # via jupyter-server -prompt-toolkit==3.0.41 +prompt-toolkit==3.0.43 # via ipython -proto-plus==1.22.3 +proto-plus==1.23.0 # via # feast (setup.py) # google-cloud-bigquery @@ -608,13 +612,13 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==14.0.1 +pyarrow==15.0.0 # via # db-dtypes # feast (setup.py) # google-cloud-bigquery # snowflake-connector-python -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa @@ -626,14 +630,14 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) # great-expectations pyflakes==3.0.1 # via flake8 -pygments==2.16.1 +pygments==2.17.2 # via # feast (setup.py) # ipython @@ -643,11 +647,11 @@ 
pyjwt[crypto]==2.8.0 # via # msal # snowflake-connector-python -pymssql==2.2.10 +pymssql==2.2.11 # via feast (setup.py) pymysql==1.1.0 # via feast (setup.py) -pyodbc==5.0.1 +pyodbc==5.1.0 # via feast (setup.py) pyopenssl==23.3.0 # via snowflake-connector-python @@ -659,7 +663,7 @@ pyproject-hooks==1.0.0 # via build pyspark==3.5.0 # via feast (setup.py) -pytest==7.4.3 +pytest==7.4.4 # via # feast (setup.py) # pytest-benchmark @@ -681,7 +685,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.4.0 +pytest-xdist==3.5.0 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -695,11 +699,11 @@ python-dateutil==2.8.2 # pandas # rockset # trino -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2023.3.post1 +pytz==2024.1 # via # great-expectations # pandas @@ -714,19 +718,19 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==25.1.1 +pyzmq==25.1.2 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 # via feast (setup.py) -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2023.10.3 +regex==2023.12.25 # via feast (setup.py) requests==2.31.0 # via @@ -749,7 +753,7 @@ requests==2.31.0 # trino requests-oauthlib==1.3.1 # via kubernetes -responses==0.24.1 +responses==0.25.0 # via moto rfc3339-validator==0.1.4 # via @@ -761,7 +765,7 @@ rfc3986-validator==0.1.1 # jupyter-events rockset==2.1.0 # via feast (setup.py) -rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -769,9 +773,9 @@ rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations -s3transfer==0.7.0 +s3transfer==0.10.0 # via boto3 -scipy==1.11.3 +scipy==1.12.0 # via great-expectations send2trash==1.8.2 # via jupyter-server @@ -780,7 +784,6 @@ six==1.16.0 # asttokens # azure-core # bleach - # cassandra-driver # geomet # happybase # isodate @@ -796,39 +799,33 @@ sniffio==1.3.0 # httpx 
snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.5.0 +snowflake-connector-python[pandas]==3.7.0 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 # via beautifulsoup4 sphinx==6.2.1 - # via - # feast (setup.py) - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinxcontrib-applehelp==1.0.7 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -858,12 +855,12 @@ tomli==2.0.1 # pytest tomlkit==0.12.3 # via snowflake-connector-python -toolz==0.12.0 +toolz==0.12.1 # via # altair # dask # partd -tornado==6.3.3 +tornado==6.4 # via # ipykernel # jupyter-client @@ -871,11 +868,11 @@ tornado==6.3.3 # jupyterlab # notebook # terminado -tqdm==4.66.1 +tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.13.0 +traitlets==5.14.1 # via # comm # ipykernel @@ -900,28 +897,29 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==23.3.0.0 +types-pyopenssl==24.0.0.20240130 # via types-redis -types-python-dateutil==2.8.19.14 +types-python-dateutil==2.8.19.20240106 # via # arrow # feast (setup.py) -types-pytz==2023.3.1.1 +types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) 
-types-redis==4.6.0.10 +types-redis==4.6.0.20240106 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==68.2.0.1 +types-setuptools==69.0.0.20240125 # via feast (setup.py) -types-tabulate==0.9.0.3 +types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # async-lru # azure-core # azure-storage-blob @@ -951,8 +949,7 @@ urllib3==1.26.18 # requests # responses # rockset - # snowflake-connector-python -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn @@ -964,7 +961,7 @@ volatile==2.1.0 # via bowler watchfiles==0.21.0 # via uvicorn -wcwidth==0.2.10 +wcwidth==0.2.13 # via prompt-toolkit webcolors==1.13 # via jsonschema @@ -972,18 +969,17 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.4 +websocket-client==1.7.0 # via - # docker # jupyter-server # kubernetes websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.41.3 +wheel==0.42.0 # via pip-tools -widgetsnbextension==4.0.9 +widgetsnbextension==4.0.10 # via ipywidgets wrapt==1.16.0 # via testcontainers diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 5d5d451e14..d38a287d72 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -4,21 +4,22 @@ # # pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt # -anyio==4.0.0 + +anyio==4.2.0 # via # httpx # starlette # watchfiles appdirs==1.4.4 # via fissix -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing bowler==0.9.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -36,35 +37,35 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.11.0 +dask==2024.2.0 # via feast (setup.py) -dill==0.3.7 
+dill==0.3.8 # via feast (setup.py) -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via anyio -fastapi==0.109.1 +fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2023.10.0 +fsspec==2024.2.0 # via dask -greenlet==3.0.1 +greenlet==3.0.3 # via sqlalchemy -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast (setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -72,18 +73,18 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.25.1 +httpx==0.26.0 # via feast (setup.py) -idna==3.4 +idna==3.6 # via # anyio # httpx # requests -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # dask # feast (setup.py) @@ -91,19 +92,19 @@ importlib-resources==6.1.1 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) -jsonschema==4.20.0 +jsonschema==4.21.1 # via feast (setup.py) -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.7.0 +mypy==1.8.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -127,7 +128,7 @@ pandavro==1.5.2 # via feast (setup.py) partd==1.4.1 # via dask -proto-plus==1.22.3 +proto-plus==1.23.0 # via feast (setup.py) protobuf==4.23.3 # via @@ -137,32 +138,32 @@ protobuf==4.23.3 # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==14.0.1 +pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) -pygments==2.16.1 +pygments==2.17.2 # via feast 
(setup.py) python-dateutil==2.8.2 # via pandas -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn -pytz==2023.3.post1 +pytz==2024.1 # via pandas pyyaml==6.0.1 # via # dask # feast (setup.py) # uvicorn -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications requests==2.31.0 # via feast (setup.py) -rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -174,11 +175,11 @@ sniffio==1.3.0 # via # anyio # httpx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -188,26 +189,27 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.12.0 +toolz==0.12.1 # via # dask # partd -tqdm==4.66.1 +tqdm==4.66.2 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -types-protobuf==4.24.0.4 +types-protobuf==4.24.0.20240129 # via mypy-protobuf -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # fastapi # mypy # pydantic # sqlalchemy2-stubs # uvicorn -urllib3==2.1.0 +urllib3==2.2.0 # via requests -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 808a58e11b..33dd89c362 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -4,11 +4,12 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt # + alabaster==0.7.13 # via sphinx altair==4.2.0 # via great-expectations -anyio==4.0.0 +anyio==4.2.0 # via # httpx # jupyter-server @@ -32,14 +33,14 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing avro==1.10.0 # via feast (setup.py) 
-azure-core==1.29.5 +azure-core==1.30.0 # via # azure-identity # azure-storage-blob @@ -47,7 +48,7 @@ azure-identity==1.15.0 # via feast (setup.py) azure-storage-blob==12.19.0 # via feast (setup.py) -babel==2.13.1 +babel==2.14.0 # via # jupyterlab-server # sphinx @@ -57,17 +58,17 @@ backports-zoneinfo==0.2.1 # via # trino # tzlocal -beautifulsoup4==4.12.2 +beautifulsoup4==4.12.3 # via nbconvert black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.29.2 +boto3==1.34.42 # via # feast (setup.py) # moto -botocore==1.32.2 +botocore==1.34.42 # via # boto3 # moto @@ -80,13 +81,13 @@ build==1.0.3 # pip-tools bytewax==0.15.1 # via feast (setup.py) -cachecontrol==0.13.1 +cachecontrol==0.14.0 # via firebase-admin cachetools==5.3.2 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.29.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -122,13 +123,13 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.0 +comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.3.2 +coverage[toml]==7.4.1 # via pytest-cov -cryptography==41.0.6 +cryptography==41.0.7 # via # azure-identity # azure-storage-blob @@ -143,9 +144,9 @@ cryptography==41.0.6 # types-redis dask==2023.5.0 # via feast (setup.py) -db-dtypes==1.1.1 +db-dtypes==1.2.0 # via google-cloud-bigquery -debugpy==1.8.0 +debugpy==1.8.1 # via ipykernel decorator==5.1.1 # via ipython @@ -153,14 +154,14 @@ defusedxml==0.7.1 # via nbconvert deprecation==2.1.0 # via testcontainers -dill==0.3.7 +dill==0.3.8 # via # bytewax # feast (setup.py) # multiprocess -distlib==0.3.7 +distlib==0.3.8 # via virtualenv -docker==6.1.3 +docker==7.0.0 # via # feast (setup.py) # testcontainers @@ -168,7 +169,7 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via # anyio # pytest @@ -176,13 +177,13 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.109.1 
+fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro -fastjsonschema==2.19.0 +fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 # via @@ -196,7 +197,7 @@ flake8==6.0.0 # via feast (setup.py) fqdn==1.5.1 # via jsonschema -fsspec==2023.9.2 +fsspec==2023.12.2 # via # dask # feast (setup.py) @@ -204,7 +205,7 @@ geojson==2.5.0 # via rockset geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.14.0 +google-api-core[grpc]==2.17.1 # via # feast (setup.py) # firebase-admin @@ -216,9 +217,9 @@ google-api-core[grpc]==2.14.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.108.0 +google-api-python-client==2.118.0 # via firebase-admin -google-auth==2.23.4 +google-auth==2.27.0 # via # google-api-core # google-api-python-client @@ -226,26 +227,26 @@ google-auth==2.23.4 # google-cloud-core # google-cloud-storage # kubernetes -google-auth-httplib2==0.1.1 +google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.22.0 +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) -google-cloud-bigtable==2.21.0 +google-cloud-bigtable==2.23.0 # via feast (setup.py) -google-cloud-core==2.3.3 +google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.18.0 +google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.13.1 +google-cloud-firestore==2.14.0 # via firebase-admin -google-cloud-storage==2.13.0 +google-cloud-storage==2.14.0 # via # feast (setup.py) # firebase-admin @@ -253,11 +254,11 @@ google-crc32c==1.5.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 +google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.61.0 
+googleapis-common-protos[grpc]==1.62.0 # via # feast (setup.py) # google-api-core @@ -265,11 +266,11 @@ googleapis-common-protos[grpc]==1.61.0 # grpcio-status great-expectations==0.15.50 # via feast (setup.py) -greenlet==3.0.1 +greenlet==3.0.3 # via sqlalchemy -grpc-google-iam-v1==0.12.7 +grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # google-api-core @@ -281,15 +282,15 @@ grpcio==1.59.2 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast (setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-status==1.59.2 +grpcio-status==1.60.1 # via google-api-core -grpcio-testing==1.59.2 +grpcio-testing==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -301,9 +302,9 @@ happybase==1.2.0 # via feast (setup.py) hazelcast-python-client==5.3.0 # via feast (setup.py) -hiredis==2.2.3 +hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httplib2==0.22.0 # via @@ -311,11 +312,13 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.25.1 - # via feast (setup.py) -identify==2.5.31 +httpx==0.26.0 + # via + # feast (setup.py) + # jupyterlab +identify==2.5.34 # via pre-commit -idna==3.4 +idna==3.6 # via # anyio # httpx @@ -324,7 +327,7 @@ idna==3.4 # snowflake-connector-python imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # build # dask @@ -344,20 +347,20 @@ importlib-resources==6.1.1 # jupyterlab iniconfig==2.0.0 # via pytest -ipykernel==6.26.0 +ipykernel==6.29.2 # via jupyterlab ipython==8.12.3 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.1 +ipywidgets==8.1.2 # via great-expectations isodate==0.6.1 # via azure-storage-blob isoduration==20.11.0 # via jsonschema -isort==5.12.0 +isort==5.13.2 # via 
feast (setup.py) jedi==0.19.1 # via ipython @@ -384,7 +387,7 @@ jsonpointer==2.4 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.20.0 +jsonschema[format-nongpl]==4.21.1 # via # altair # feast (setup.py) @@ -392,14 +395,14 @@ jsonschema[format-nongpl]==4.20.0 # jupyter-events # jupyterlab-server # nbformat -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema jupyter-client==8.6.0 # via # ipykernel # jupyter-server # nbclient -jupyter-core==5.5.0 +jupyter-core==5.7.1 # via # ipykernel # jupyter-client @@ -412,24 +415,24 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.2 # via jupyterlab -jupyter-server==2.11.2 +jupyter-server==2.12.5 # via # jupyter-lsp # jupyterlab # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.4.4 +jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.0.11 +jupyterlab==4.1.1 # via notebook -jupyterlab-pygments==0.2.2 +jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.1 +jupyterlab-server==2.25.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.9 +jupyterlab-widgets==3.0.10 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -437,12 +440,12 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations -markupsafe==2.1.3 +markupsafe==2.1.5 # via # jinja2 # nbconvert # werkzeug -marshmallow==3.20.1 +marshmallow==3.20.2 # via great-expectations matplotlib-inline==0.1.6 # via @@ -456,23 +459,23 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==4.2.9 +moto==4.2.14 # via feast (setup.py) -msal==1.25.0 +msal==1.26.0 # via # azure-identity # msal-extensions -msal-extensions==1.0.0 +msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via cachecontrol -multiprocess==0.70.15 +multiprocess==0.70.16 # via bytewax mypy==1.8.0 # via @@ -486,7 +489,7 @@ mypy-protobuf==3.1.0 # via 
feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.11.0 +nbconvert==7.16.0 # via jupyter-server nbformat==5.9.2 # via @@ -494,11 +497,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.8 +nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.0.6 +notebook==7.1.0 # via great-expectations notebook-shim==0.2.3 # via @@ -516,7 +519,7 @@ numpy==1.24.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -overrides==7.4.0 +overrides==7.7.0 # via jupyter-server packaging==23.2 # via @@ -533,6 +536,7 @@ packaging==23.2 # jupyterlab # jupyterlab-server # marshmallow + # msal-extensions # nbconvert # pytest # snowflake-connector-python @@ -548,17 +552,17 @@ pandas==1.5.3 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 +pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi partd==1.4.1 # via dask -pathspec==0.11.2 +pathspec==0.12.1 # via black pbr==6.0.0 # via mock -pexpect==4.8.0 +pexpect==4.9.0 # via ipython pickleshare==0.7.5 # via ipython @@ -572,7 +576,7 @@ platformdirs==3.11.0 # jupyter-core # snowflake-connector-python # virtualenv -pluggy==1.3.0 +pluggy==1.4.0 # via pytest ply==3.11 # via thriftpy2 @@ -580,11 +584,11 @@ portalocker==2.8.2 # via msal-extensions pre-commit==3.3.1 # via feast (setup.py) -prometheus-client==0.18.0 +prometheus-client==0.20.0 # via jupyter-server -prompt-toolkit==3.0.41 +prompt-toolkit==3.0.43 # via ipython -proto-plus==1.22.3 +proto-plus==1.23.0 # via # feast (setup.py) # google-cloud-bigquery @@ -628,13 +632,13 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==14.0.1 +pyarrow==15.0.0 # via # db-dtypes # feast (setup.py) # google-cloud-bigquery # snowflake-connector-python -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa @@ -646,14 +650,14 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) # 
great-expectations pyflakes==3.0.1 # via flake8 -pygments==2.16.1 +pygments==2.17.2 # via # feast (setup.py) # ipython @@ -663,11 +667,11 @@ pyjwt[crypto]==2.8.0 # via # msal # snowflake-connector-python -pymssql==2.2.10 +pymssql==2.2.11 # via feast (setup.py) pymysql==1.1.0 # via feast (setup.py) -pyodbc==5.0.1 +pyodbc==5.1.0 # via feast (setup.py) pyopenssl==23.3.0 # via snowflake-connector-python @@ -679,7 +683,7 @@ pyproject-hooks==1.0.0 # via build pyspark==3.5.0 # via feast (setup.py) -pytest==7.4.3 +pytest==7.4.4 # via # feast (setup.py) # pytest-benchmark @@ -701,7 +705,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.4.0 +pytest-xdist==3.5.0 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -715,11 +719,11 @@ python-dateutil==2.8.2 # pandas # rockset # trino -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2023.3.post1 +pytz==2024.1 # via # babel # great-expectations @@ -735,19 +739,19 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==25.1.1 +pyzmq==25.1.2 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 # via feast (setup.py) -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2023.10.3 +regex==2023.12.25 # via feast (setup.py) requests==2.31.0 # via @@ -770,7 +774,7 @@ requests==2.31.0 # trino requests-oauthlib==1.3.1 # via kubernetes -responses==0.24.1 +responses==0.25.0 # via moto rfc3339-validator==0.1.4 # via @@ -782,7 +786,7 @@ rfc3986-validator==0.1.1 # jupyter-events rockset==2.1.0 # via feast (setup.py) -rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -792,7 +796,7 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -s3transfer==0.7.0 +s3transfer==0.10.0 # via boto3 scipy==1.10.1 # via great-expectations @@ -803,7 +807,6 @@ six==1.16.0 # asttokens # azure-core # bleach - # 
cassandra-driver # geomet # happybase # isodate @@ -819,7 +822,7 @@ sniffio==1.3.0 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.5.0 +snowflake-connector-python[pandas]==3.7.0 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -839,13 +842,13 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -875,12 +878,12 @@ tomli==2.0.1 # pytest tomlkit==0.12.3 # via snowflake-connector-python -toolz==0.12.0 +toolz==0.12.1 # via # altair # dask # partd -tornado==6.3.3 +tornado==6.4 # via # ipykernel # jupyter-client @@ -888,11 +891,11 @@ tornado==6.3.3 # jupyterlab # notebook # terminado -tqdm==4.66.1 +tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.13.0 +traitlets==5.14.1 # via # comm # ipykernel @@ -917,28 +920,29 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==23.3.0.0 +types-pyopenssl==24.0.0.20240130 # via types-redis -types-python-dateutil==2.8.19.14 +types-python-dateutil==2.8.19.20240106 # via # arrow # feast (setup.py) -types-pytz==2023.3.1.1 +types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) -types-redis==4.6.0.10 +types-redis==4.6.0.20240106 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==68.2.0.1 +types-setuptools==69.0.0.20240125 # via feast (setup.py) -types-tabulate==0.9.0.3 +types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # async-lru # azure-core # azure-storage-blob @@ -972,7 +976,7 @@ 
urllib3==1.26.18 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn @@ -984,7 +988,7 @@ volatile==2.1.0 # via bowler watchfiles==0.21.0 # via uvicorn -wcwidth==0.2.10 +wcwidth==0.2.13 # via prompt-toolkit webcolors==1.13 # via jsonschema @@ -992,18 +996,17 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.4 +websocket-client==1.7.0 # via - # docker # jupyter-server # kubernetes websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.41.3 +wheel==0.42.0 # via pip-tools -widgetsnbextension==4.0.9 +widgetsnbextension==4.0.10 # via ipywidgets wrapt==1.16.0 # via testcontainers diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 163fa4c9a8..388bb3143f 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -4,21 +4,22 @@ # # pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt # -anyio==4.0.0 + +anyio==4.2.0 # via # httpx # starlette # watchfiles appdirs==1.4.4 # via fissix -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing bowler==0.9.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -38,33 +39,33 @@ colorama==0.4.6 # via feast (setup.py) dask==2023.5.0 # via feast (setup.py) -dill==0.3.7 +dill==0.3.8 # via feast (setup.py) -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via anyio -fastapi==0.109.1 +fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2023.10.0 +fsspec==2024.2.0 # via dask -greenlet==3.0.1 +greenlet==3.0.3 # via sqlalchemy -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast 
(setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -72,18 +73,18 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.25.1 +httpx==0.26.0 # via feast (setup.py) -idna==3.4 +idna==3.6 # via # anyio # httpx # requests -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # dask # feast (setup.py) @@ -94,19 +95,19 @@ importlib-resources==6.1.1 # jsonschema-specifications jinja2==3.1.3 # via feast (setup.py) -jsonschema==4.20.0 +jsonschema==4.21.1 # via feast (setup.py) -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.7.0 +mypy==1.8.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -132,7 +133,7 @@ partd==1.4.1 # via dask pkgutil-resolve-name==1.3.10 # via jsonschema -proto-plus==1.22.3 +proto-plus==1.23.0 # via feast (setup.py) protobuf==4.23.3 # via @@ -142,32 +143,32 @@ protobuf==4.23.3 # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==14.0.1 +pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) -pygments==2.16.1 +pygments==2.17.2 # via feast (setup.py) python-dateutil==2.8.2 # via pandas -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn -pytz==2023.3.post1 +pytz==2024.1 # via pandas pyyaml==6.0.1 # via # dask # feast (setup.py) # uvicorn -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications requests==2.31.0 # via feast (setup.py) -rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -179,11 +180,11 @@ sniffio==1.3.0 # via # anyio # httpx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast 
(setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -193,27 +194,28 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.12.0 +toolz==0.12.1 # via # dask # partd -tqdm==4.66.1 +tqdm==4.66.2 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -types-protobuf==4.24.0.4 +types-protobuf==4.24.0.20240129 # via mypy-protobuf -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # fastapi # mypy # pydantic # sqlalchemy2-stubs # starlette # uvicorn -urllib3==2.1.0 +urllib3==2.2.0 # via requests -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index f9d7ac3fb9..9cb322d2f6 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -4,11 +4,12 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.9-ci-requirements.txt # -alabaster==0.7.13 + +alabaster==0.7.16 # via sphinx altair==4.2.0 # via great-expectations -anyio==4.0.0 +anyio==4.2.0 # via # httpx # jupyter-server @@ -32,14 +33,14 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing avro==1.10.0 # via feast (setup.py) -azure-core==1.29.5 +azure-core==1.30.0 # via # azure-identity # azure-storage-blob @@ -47,21 +48,21 @@ azure-identity==1.15.0 # via feast (setup.py) azure-storage-blob==12.19.0 # via feast (setup.py) -babel==2.13.1 +babel==2.14.0 # via # jupyterlab-server # sphinx -beautifulsoup4==4.12.2 +beautifulsoup4==4.12.3 # via nbconvert black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.29.2 +boto3==1.34.42 # via # feast (setup.py) # moto -botocore==1.32.2 +botocore==1.34.42 # 
via # boto3 # moto @@ -74,13 +75,13 @@ build==1.0.3 # pip-tools bytewax==0.15.1 # via feast (setup.py) -cachecontrol==0.13.1 +cachecontrol==0.14.0 # via firebase-admin cachetools==5.3.2 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.29.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -116,13 +117,13 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.0 +comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.3.2 +coverage[toml]==7.4.1 # via pytest-cov -cryptography==41.0.6 +cryptography==41.0.7 # via # azure-identity # azure-storage-blob @@ -135,11 +136,11 @@ cryptography==41.0.6 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2023.11.0 +dask==2024.2.0 # via feast (setup.py) -db-dtypes==1.1.1 +db-dtypes==1.2.0 # via google-cloud-bigquery -debugpy==1.8.0 +debugpy==1.8.1 # via ipykernel decorator==5.1.1 # via ipython @@ -147,14 +148,14 @@ defusedxml==0.7.1 # via nbconvert deprecation==2.1.0 # via testcontainers -dill==0.3.7 +dill==0.3.8 # via # bytewax # feast (setup.py) # multiprocess -distlib==0.3.7 +distlib==0.3.8 # via virtualenv -docker==6.1.3 +docker==7.0.0 # via # feast (setup.py) # testcontainers @@ -162,7 +163,7 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via # anyio # ipython @@ -171,13 +172,13 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.109.1 +fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro -fastjsonschema==2.19.0 +fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 # via @@ -191,7 +192,7 @@ flake8==6.0.0 # via feast (setup.py) fqdn==1.5.1 # via jsonschema -fsspec==2023.9.2 +fsspec==2023.12.2 # via # dask # feast (setup.py) @@ -199,7 +200,7 @@ geojson==2.5.0 # via rockset geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.14.0 +google-api-core[grpc]==2.17.1 # via 
# feast (setup.py) # firebase-admin @@ -211,9 +212,9 @@ google-api-core[grpc]==2.14.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.108.0 +google-api-python-client==2.118.0 # via firebase-admin -google-auth==2.23.4 +google-auth==2.27.0 # via # google-api-core # google-api-python-client @@ -221,26 +222,26 @@ google-auth==2.23.4 # google-cloud-core # google-cloud-storage # kubernetes -google-auth-httplib2==0.1.1 +google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.22.0 +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) -google-cloud-bigtable==2.21.0 +google-cloud-bigtable==2.23.0 # via feast (setup.py) -google-cloud-core==2.3.3 +google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.18.0 +google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.13.1 +google-cloud-firestore==2.14.0 # via firebase-admin -google-cloud-storage==2.13.0 +google-cloud-storage==2.14.0 # via # feast (setup.py) # firebase-admin @@ -248,11 +249,11 @@ google-crc32c==1.5.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 +google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.61.0 +googleapis-common-protos[grpc]==1.62.0 # via # feast (setup.py) # google-api-core @@ -260,11 +261,11 @@ googleapis-common-protos[grpc]==1.61.0 # grpcio-status great-expectations==0.15.50 # via feast (setup.py) -greenlet==3.0.1 +greenlet==3.0.3 # via sqlalchemy -grpc-google-iam-v1==0.12.7 +grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # google-api-core @@ -276,15 +277,15 @@ grpcio==1.59.2 # grpcio-status # grpcio-testing # grpcio-tools 
-grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast (setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-status==1.59.2 +grpcio-status==1.60.1 # via google-api-core -grpcio-testing==1.59.2 +grpcio-testing==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -296,9 +297,9 @@ happybase==1.2.0 # via feast (setup.py) hazelcast-python-client==5.3.0 # via feast (setup.py) -hiredis==2.2.3 +hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httplib2==0.22.0 # via @@ -306,11 +307,13 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.25.1 - # via feast (setup.py) -identify==2.5.31 +httpx==0.26.0 + # via + # feast (setup.py) + # jupyterlab +identify==2.5.34 # via pre-commit -idna==3.4 +idna==3.6 # via # anyio # httpx @@ -319,7 +322,7 @@ idna==3.4 # snowflake-connector-python imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # build # dask @@ -335,20 +338,20 @@ importlib-resources==6.1.1 # via feast (setup.py) iniconfig==2.0.0 # via pytest -ipykernel==6.26.0 +ipykernel==6.29.2 # via jupyterlab -ipython==8.17.2 +ipython==8.18.1 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.1 +ipywidgets==8.1.2 # via great-expectations isodate==0.6.1 # via azure-storage-blob isoduration==20.11.0 # via jsonschema -isort==5.12.0 +isort==5.13.2 # via feast (setup.py) jedi==0.19.1 # via ipython @@ -375,7 +378,7 @@ jsonpointer==2.4 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.20.0 +jsonschema[format-nongpl]==4.21.1 # via # altair # feast (setup.py) @@ -383,14 +386,14 @@ jsonschema[format-nongpl]==4.20.0 # jupyter-events # jupyterlab-server # nbformat -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema jupyter-client==8.6.0 # via # ipykernel # jupyter-server # 
nbclient -jupyter-core==5.5.0 +jupyter-core==5.7.1 # via # ipykernel # jupyter-client @@ -403,24 +406,24 @@ jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.2 # via jupyterlab -jupyter-server==2.11.2 +jupyter-server==2.12.5 # via # jupyter-lsp # jupyterlab # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.4.4 +jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.0.11 +jupyterlab==4.1.1 # via notebook -jupyterlab-pygments==0.2.2 +jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.1 +jupyterlab-server==2.25.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.9 +jupyterlab-widgets==3.0.10 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -428,12 +431,12 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations -markupsafe==2.1.3 +markupsafe==2.1.5 # via # jinja2 # nbconvert # werkzeug -marshmallow==3.20.1 +marshmallow==3.20.2 # via great-expectations matplotlib-inline==0.1.6 # via @@ -447,23 +450,23 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==4.2.9 +moto==4.2.14 # via feast (setup.py) -msal==1.25.0 +msal==1.26.0 # via # azure-identity # msal-extensions -msal-extensions==1.0.0 +msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via cachecontrol -multiprocess==0.70.15 +multiprocess==0.70.16 # via bytewax mypy==1.8.0 # via @@ -477,7 +480,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.11.0 +nbconvert==7.16.0 # via jupyter-server nbformat==5.9.2 # via @@ -485,11 +488,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.8 +nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.0.6 +notebook==7.1.0 # via great-expectations notebook-shim==0.2.3 # via @@ -507,7 +510,7 @@ numpy==1.24.4 # scipy oauthlib==3.2.2 # via requests-oauthlib 
-overrides==7.4.0 +overrides==7.7.0 # via jupyter-server packaging==23.2 # via @@ -524,6 +527,7 @@ packaging==23.2 # jupyterlab # jupyterlab-server # marshmallow + # msal-extensions # nbconvert # pytest # snowflake-connector-python @@ -539,17 +543,17 @@ pandas==1.5.3 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 +pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi partd==1.4.1 # via dask -pathspec==0.11.2 +pathspec==0.12.1 # via black pbr==6.0.0 # via mock -pexpect==4.8.0 +pexpect==4.9.0 # via ipython pip-tools==7.3.0 # via feast (setup.py) @@ -559,7 +563,7 @@ platformdirs==3.11.0 # jupyter-core # snowflake-connector-python # virtualenv -pluggy==1.3.0 +pluggy==1.4.0 # via pytest ply==3.11 # via thriftpy2 @@ -567,11 +571,11 @@ portalocker==2.8.2 # via msal-extensions pre-commit==3.3.1 # via feast (setup.py) -prometheus-client==0.18.0 +prometheus-client==0.20.0 # via jupyter-server -prompt-toolkit==3.0.41 +prompt-toolkit==3.0.43 # via ipython -proto-plus==1.22.3 +proto-plus==1.23.0 # via # feast (setup.py) # google-cloud-bigquery @@ -615,13 +619,13 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==14.0.1 +pyarrow==15.0.0 # via # db-dtypes # feast (setup.py) # google-cloud-bigquery # snowflake-connector-python -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # rsa @@ -633,14 +637,14 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) # great-expectations pyflakes==3.0.1 # via flake8 -pygments==2.16.1 +pygments==2.17.2 # via # feast (setup.py) # ipython @@ -650,11 +654,11 @@ pyjwt[crypto]==2.8.0 # via # msal # snowflake-connector-python -pymssql==2.2.10 +pymssql==2.2.11 # via feast (setup.py) pymysql==1.1.0 # via feast (setup.py) -pyodbc==5.0.1 +pyodbc==5.1.0 # via feast (setup.py) pyopenssl==23.3.0 # via snowflake-connector-python @@ -666,7 +670,7 @@ pyproject-hooks==1.0.0 # via build 
pyspark==3.5.0 # via feast (setup.py) -pytest==7.4.3 +pytest==7.4.4 # via # feast (setup.py) # pytest-benchmark @@ -688,7 +692,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.4.0 +pytest-xdist==3.5.0 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -702,11 +706,11 @@ python-dateutil==2.8.2 # pandas # rockset # trino -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2023.3.post1 +pytz==2024.1 # via # great-expectations # pandas @@ -721,19 +725,19 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==25.1.1 +pyzmq==25.1.2 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 # via feast (setup.py) -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2023.10.3 +regex==2023.12.25 # via feast (setup.py) requests==2.31.0 # via @@ -756,7 +760,7 @@ requests==2.31.0 # trino requests-oauthlib==1.3.1 # via kubernetes -responses==0.24.1 +responses==0.25.0 # via moto rfc3339-validator==0.1.4 # via @@ -768,7 +772,7 @@ rfc3986-validator==0.1.1 # jupyter-events rockset==2.1.0 # via feast (setup.py) -rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -778,9 +782,9 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -s3transfer==0.7.0 +s3transfer==0.10.0 # via boto3 -scipy==1.11.3 +scipy==1.12.0 # via great-expectations send2trash==1.8.2 # via jupyter-server @@ -789,7 +793,6 @@ six==1.16.0 # asttokens # azure-core # bleach - # cassandra-driver # geomet # happybase # isodate @@ -805,39 +808,33 @@ sniffio==1.3.0 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.5.0 +snowflake-connector-python[pandas]==3.7.0 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 # via beautifulsoup4 sphinx==6.2.1 - # via - # feast (setup.py) - # sphinxcontrib-applehelp 
- # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinxcontrib-applehelp==1.0.7 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -867,12 +864,12 @@ tomli==2.0.1 # pytest tomlkit==0.12.3 # via snowflake-connector-python -toolz==0.12.0 +toolz==0.12.1 # via # altair # dask # partd -tornado==6.3.3 +tornado==6.4 # via # ipykernel # jupyter-client @@ -880,11 +877,11 @@ tornado==6.3.3 # jupyterlab # notebook # terminado -tqdm==4.66.1 +tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.13.0 +traitlets==5.14.1 # via # comm # ipykernel @@ -909,28 +906,29 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==23.3.0.0 +types-pyopenssl==24.0.0.20240130 # via types-redis -types-python-dateutil==2.8.19.14 +types-python-dateutil==2.8.19.20240106 # via # arrow # feast (setup.py) -types-pytz==2023.3.1.1 +types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) -types-redis==4.6.0.10 +types-redis==4.6.0.20240106 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==68.2.0.1 +types-setuptools==69.0.0.20240125 # via feast (setup.py) -types-tabulate==0.9.0.3 +types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 
# via types-requests -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # async-lru # azure-core # azure-storage-blob @@ -964,7 +962,7 @@ urllib3==1.26.18 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn @@ -976,7 +974,7 @@ volatile==2.1.0 # via bowler watchfiles==0.21.0 # via uvicorn -wcwidth==0.2.10 +wcwidth==0.2.13 # via prompt-toolkit webcolors==1.13 # via jsonschema @@ -984,18 +982,17 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.4 +websocket-client==1.7.0 # via - # docker # jupyter-server # kubernetes websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.41.3 +wheel==0.42.0 # via pip-tools -widgetsnbextension==4.0.9 +widgetsnbextension==4.0.10 # via ipywidgets wrapt==1.16.0 # via testcontainers diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 4d9b8f107d..012dac6f81 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -4,21 +4,22 @@ # # pip-compile --output-file=sdk/python/requirements/py3.9-requirements.txt # -anyio==4.0.0 + +anyio==4.2.0 # via # httpx # starlette # watchfiles appdirs==1.4.4 # via fissix -attrs==23.1.0 +attrs==23.2.0 # via # bowler # jsonschema # referencing bowler==0.9.0 # via feast (setup.py) -certifi==2023.7.22 +certifi==2024.2.2 # via # httpcore # httpx @@ -36,35 +37,35 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.11.0 +dask==2024.2.0 # via feast (setup.py) -dill==0.3.7 +dill==0.3.8 # via feast (setup.py) -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via anyio -fastapi==0.109.1 +fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.0 +fastavro==1.9.4 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2023.10.0 +fsspec==2024.2.0 # via dask -greenlet==3.0.1 +greenlet==3.0.3 # via 
sqlalchemy -grpcio==1.59.2 +grpcio==1.60.1 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.59.2 +grpcio-health-checking==1.60.1 # via feast (setup.py) -grpcio-reflection==1.59.2 +grpcio-reflection==1.60.1 # via feast (setup.py) -grpcio-tools==1.59.2 +grpcio-tools==1.60.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -72,18 +73,18 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.2 +httpcore==1.0.3 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.25.1 +httpx==0.26.0 # via feast (setup.py) -idna==3.4 +idna==3.6 # via # anyio # httpx # requests -importlib-metadata==6.8.0 +importlib-metadata==6.11.0 # via # dask # feast (setup.py) @@ -91,19 +92,19 @@ importlib-resources==6.1.1 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) -jsonschema==4.20.0 +jsonschema==4.21.1 # via feast (setup.py) -jsonschema-specifications==2023.11.1 +jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -mmh3==4.0.1 +mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.7.0 +mypy==1.8.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -127,7 +128,7 @@ pandavro==1.5.2 # via feast (setup.py) partd==1.4.1 # via dask -proto-plus==1.22.3 +proto-plus==1.23.0 # via feast (setup.py) protobuf==4.23.3 # via @@ -137,32 +138,32 @@ protobuf==4.23.3 # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==14.0.1 +pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.13 +pydantic==1.10.14 # via # fastapi # feast (setup.py) -pygments==2.16.1 +pygments==2.17.2 # via feast (setup.py) python-dateutil==2.8.2 # via pandas -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn -pytz==2023.3.post1 +pytz==2024.1 # via pandas pyyaml==6.0.1 # via # dask # feast (setup.py) # uvicorn -referencing==0.31.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications requests==2.31.0 # via feast (setup.py) 
-rpds-py==0.13.0 +rpds-py==0.18.0 # via # jsonschema # referencing @@ -174,11 +175,11 @@ sniffio==1.3.0 # via # anyio # httpx -sqlalchemy[mypy]==1.4.50 +sqlalchemy[mypy]==1.4.51 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a37 +sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -starlette==0.35.1 +starlette==0.36.3 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -188,27 +189,28 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.12.0 +toolz==0.12.1 # via # dask # partd -tqdm==4.66.1 +tqdm==4.66.2 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -types-protobuf==4.24.0.4 +types-protobuf==4.24.0.20240129 # via mypy-protobuf -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via + # anyio # fastapi # mypy # pydantic # sqlalchemy2-stubs # starlette # uvicorn -urllib3==2.1.0 +urllib3==2.2.0 # via requests -uvicorn[standard]==0.24.0.post1 +uvicorn[standard]==0.27.1 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/setup.py b/setup.py index 81ae63a7a4..ebc4df31a8 100644 --- a/setup.py +++ b/setup.py @@ -90,7 +90,7 @@ "google-cloud-datastore>=2.1.0,<3", "google-cloud-storage>=1.34.0,<3", "google-cloud-bigtable>=2.11.0,<3", - "fsspec<2023.10.0", + "fsspec<=2024.1.0", ] REDIS_REQUIRED = [ @@ -98,7 +98,7 @@ "hiredis>=2.0.0,<3", ] -AWS_REQUIRED = ["boto3>=1.17.0,<2", "docker>=5.0.2", "fsspec<2023.10.0"] +AWS_REQUIRED = ["boto3>=1.17.0,<2", "docker>=5.0.2", "fsspec<=2024.1.0"] BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] @@ -158,7 +158,6 @@ "moto<5", "mypy>=1.4.1", "avro==1.10.0", - "fsspec<2023.10.0", "urllib3>=1.25.4,<3", "psutil==5.9.0", "py>=1.11.0", # https://github.com/pytest-dev/pytest/issues/10420 From ec11a7cb8d56d8e2e5cda07e06b4c98dcc9d2ba3 Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Thu, 15 Feb 2024 11:04:06 -0500 Subject: [PATCH 036/122] feat: Update the Pydantic from v1 to v2 (#3948) --- sdk/python/feast/importer.py | 3 +- .../infra/contrib/spark_kafka_processor.py | 4 
+- .../feast/infra/contrib/stream_processor.py | 6 +- .../feature_servers/aws_lambda/config.py | 3 +- .../infra/feature_servers/base_config.py | 2 +- .../feature_servers/gcp_cloudrun/config.py | 3 +- .../feature_servers/local_process/config.py | 2 +- .../infra/materialization/snowflake_engine.py | 6 +- .../feast/infra/offline_stores/bigquery.py | 22 ++- .../contrib/athena_offline_store/athena.py | 2 +- .../athena_offline_store/tests/data_source.py | 4 +- .../contrib/mssql_offline_store/mssql.py | 7 +- .../mssql_offline_store/tests/data_source.py | 4 +- .../postgres_offline_store/postgres.py | 2 +- .../tests/data_source.py | 4 +- .../spark_offline_store/tests/data_source.py | 9 +- .../test_config/manual_tests.py | 2 +- .../trino_offline_store/tests/data_source.py | 6 +- .../contrib/trino_offline_store/trino.py | 12 +- sdk/python/feast/infra/offline_stores/file.py | 3 +- .../feast/infra/offline_stores/redshift.py | 16 +- .../feast/infra/offline_stores/snowflake.py | 8 +- .../infra/offline_stores/snowflake_source.py | 6 +- .../feast/infra/online_stores/bigtable.py | 3 +- .../cassandra_online_store.py | 13 +- .../contrib/hbase_online_store/hbase.py | 3 +- .../contrib/mysql_online_store/mysql.py | 4 +- .../infra/online_stores/contrib/postgres.py | 3 +- .../feast/infra/online_stores/datastore.py | 13 +- .../feast/infra/online_stores/dynamodb.py | 3 +- sdk/python/feast/infra/online_stores/redis.py | 2 +- .../feast/infra/online_stores/snowflake.py | 9 +- .../feast/infra/online_stores/sqlite.py | 3 +- .../feast/infra/passthrough_provider.py | 2 +- .../feast/infra/registry/base_registry.py | 36 ++++ sdk/python/feast/infra/registry/snowflake.py | 9 +- .../infra/utils/snowflake/snowflake_utils.py | 6 +- sdk/python/feast/repo_config.py | 165 ++++++++---------- .../requirements/py3.10-ci-requirements.txt | 17 +- .../requirements/py3.10-requirements.txt | 16 +- .../requirements/py3.8-ci-requirements.txt | 16 +- .../requirements/py3.8-requirements.txt | 12 +- 
.../requirements/py3.9-ci-requirements.txt | 17 +- .../requirements/py3.9-requirements.txt | 8 +- sdk/python/tests/conftest.py | 5 +- .../feature_repos/repo_configuration.py | 16 +- .../universal/data_source_creator.py | 9 +- .../universal/data_sources/bigquery.py | 3 +- .../universal/data_sources/file.py | 9 +- .../universal/data_sources/redshift.py | 5 +- .../universal/data_sources/snowflake.py | 4 +- .../feature_repos/universal/feature_views.py | 3 +- .../universal/online_store_creator.py | 3 +- sdk/python/tests/unit/cli/test_cli_chdir.py | 9 +- .../offline_stores/test_offline_store.py | 5 +- .../infra/offline_stores/test_redshift.py | 1 + .../infra/scaffolding/test_repo_config.py | 12 +- sdk/python/tests/utils/e2e_test_validation.py | 2 +- setup.py | 4 +- 59 files changed, 333 insertions(+), 253 deletions(-) diff --git a/sdk/python/feast/importer.py b/sdk/python/feast/importer.py index d1d7d62901..938d29fe31 100644 --- a/sdk/python/feast/importer.py +++ b/sdk/python/feast/importer.py @@ -1,5 +1,4 @@ import importlib -from typing import Optional from feast.errors import ( FeastClassImportError, @@ -8,7 +7,7 @@ ) -def import_class(module_name: str, class_name: str, class_type: Optional[str] = None): +def import_class(module_name: str, class_name: str, class_type: str = ""): """ Dynamically loads and returns a class from a module. 
diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index bac1c28b06..fc4a34f17b 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -1,5 +1,5 @@ from types import MethodType -from typing import List, Optional +from typing import List, Optional, no_type_check import pandas as pd from pyspark.sql import DataFrame, SparkSession @@ -76,6 +76,8 @@ def ingest_stream_feature_view( online_store_query = self._write_stream_data(transformed_df, to) return online_store_query + # In the line 64 of __init__(), the "data_source" is assigned a stream_source (and has to be KafkaSource as in line 40). + @no_type_check def _ingest_stream_data(self) -> StreamTable: """Only supports json and avro formats currently.""" if self.format == "json": diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index df4e144f8c..c4620f4ca1 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -1,4 +1,4 @@ -from abc import ABC +from abc import ABC, abstractmethod from types import MethodType from typing import TYPE_CHECKING, Optional @@ -50,6 +50,7 @@ def __init__( self.sfv = sfv self.data_source = data_source + @abstractmethod def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: """ Ingests data from the stream source attached to the stream feature view; transforms the data @@ -57,12 +58,14 @@ def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: """ raise NotImplementedError + @abstractmethod def _ingest_stream_data(self) -> StreamTable: """ Ingests data into a StreamTable. """ raise NotImplementedError + @abstractmethod def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: """ Applies transformations on top of StreamTable object. 
Since stream engines use lazy @@ -71,6 +74,7 @@ def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: """ raise NotImplementedError + @abstractmethod def _write_stream_data(self, table: StreamTable, to: PushMode) -> None: """ Launches a job to persist stream data to the online store and/or offline store, depending diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/config.py b/sdk/python/feast/infra/feature_servers/aws_lambda/config.py index 31dd879af6..946831a18f 100644 --- a/sdk/python/feast/infra/feature_servers/aws_lambda/config.py +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/config.py @@ -1,5 +1,6 @@ +from typing import Literal + from pydantic import StrictBool, StrictStr -from pydantic.typing import Literal from feast.infra.feature_servers.base_config import BaseFeatureServerConfig diff --git a/sdk/python/feast/infra/feature_servers/base_config.py b/sdk/python/feast/infra/feature_servers/base_config.py index 756dd79b43..1a348032e1 100644 --- a/sdk/python/feast/infra/feature_servers/base_config.py +++ b/sdk/python/feast/infra/feature_servers/base_config.py @@ -30,5 +30,5 @@ class BaseFeatureServerConfig(FeastConfigBaseModel): enabled: StrictBool = False """Whether the feature server should be launched.""" - feature_logging: Optional[FeatureLoggingConfig] + feature_logging: Optional[FeatureLoggingConfig] = None """ Feature logging configuration """ diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py index 8d0c269cf5..ddcbde7924 100644 --- a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py @@ -1,5 +1,6 @@ +from typing import Literal + from pydantic import StrictBool -from pydantic.typing import Literal from feast.infra.feature_servers.base_config import BaseFeatureServerConfig diff --git a/sdk/python/feast/infra/feature_servers/local_process/config.py 
b/sdk/python/feast/infra/feature_servers/local_process/config.py index bb2e7bdf73..3d97912e4b 100644 --- a/sdk/python/feast/infra/feature_servers/local_process/config.py +++ b/sdk/python/feast/infra/feature_servers/local_process/config.py @@ -1,4 +1,4 @@ -from pydantic.typing import Literal +from typing import Literal from feast.infra.feature_servers.base_config import BaseFeatureServerConfig diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 36c42cd390..62b23dfade 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -7,7 +7,7 @@ import click import pandas as pd from colorama import Fore, Style -from pydantic import Field, StrictStr +from pydantic import ConfigDict, Field, StrictStr from pytz import utc from tqdm import tqdm @@ -72,9 +72,7 @@ class SnowflakeMaterializationEngineConfig(FeastConfigBaseModel): schema_: Optional[str] = Field("PUBLIC", alias="schema") """ Snowflake schema name """ - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) @dataclass diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 0ee82a908e..68420c0664 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -10,6 +10,7 @@ Dict, Iterator, List, + Literal, Optional, Tuple, Union, @@ -19,8 +20,7 @@ import pandas as pd import pyarrow import pyarrow.parquet -from pydantic import ConstrainedStr, StrictStr, validator -from pydantic.typing import Literal +from pydantic import StrictStr, field_validator from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed from feast import flags_helper @@ -72,13 +72,6 @@ def get_http_client_info(): return http_client_info.ClientInfo(user_agent=get_user_agent()) -class 
BigQueryTableCreateDisposition(ConstrainedStr): - """Custom constraint for table_create_disposition. To understand more, see: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition""" - - values = {"CREATE_NEVER", "CREATE_IF_NEEDED"} - - class BigQueryOfflineStoreConfig(FeastConfigBaseModel): """Offline store config for GCP BigQuery""" @@ -102,10 +95,15 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): gcs_staging_location: Optional[str] = None """ (optional) GCS location used for offloading BigQuery results as parquet files.""" - table_create_disposition: Optional[BigQueryTableCreateDisposition] = None - """ (optional) Specifies whether the job is allowed to create new tables. The default value is CREATE_IF_NEEDED.""" + table_create_disposition: Literal[ + "CREATE_NEVER", "CREATE_IF_NEEDED" + ] = "CREATE_IF_NEEDED" + """ (optional) Specifies whether the job is allowed to create new tables. The default value is CREATE_IF_NEEDED. + Custom constraint for table_create_disposition. 
To understand more, see: + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition + """ - @validator("billing_project_id") + @field_validator("billing_project_id") def project_id_exists(cls, v, values, **kwargs): if v and not values["project_id"]: raise ValueError( diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py index 85a61106aa..ae510171db 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py @@ -8,6 +8,7 @@ Dict, Iterator, List, + Literal, Optional, Tuple, Union, @@ -18,7 +19,6 @@ import pyarrow import pyarrow as pa from pydantic import StrictStr -from pydantic.typing import Literal from pytz import utc from feast import OnDemandFeatureView diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py index f68e109d6c..6b2238830b 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py @@ -48,10 +48,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - suffix: Optional[str] = None, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: table_name = destination_name diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py index 849d5cc797..67bae292c3 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py +++ 
b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py @@ -3,7 +3,7 @@ import warnings from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Set, Tuple, Union import numpy as np import pandas @@ -11,7 +11,6 @@ import pyarrow as pa import sqlalchemy from pydantic.types import StrictStr -from pydantic.typing import Literal from sqlalchemy import create_engine from sqlalchemy.engine import Engine from sqlalchemy.orm import sessionmaker @@ -32,7 +31,7 @@ from feast.infra.provider import RetrievalJob from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView -from feast.repo_config import FeastBaseModel, RepoConfig +from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage from feast.type_map import pa_to_mssql_type from feast.usage import log_exceptions_and_usage @@ -43,7 +42,7 @@ EntitySchema = Dict[str, np.dtype] -class MsSqlServerOfflineStoreConfig(FeastBaseModel): +class MsSqlServerOfflineStoreConfig(FeastConfigBaseModel): """Offline store config for SQL Server""" type: Literal["mssql"] = "mssql" diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 2604cf7c18..71ce56bdef 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -64,10 +64,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, - **kwargs, + timestamp_field: Optional[str] = "ts", ) -> 
DataSource: # Make sure the field mapping is correct and convert the datetime datasources. if timestamp_field in df: diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index c2e95a8648..9b300d7bf4 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -9,6 +9,7 @@ Iterator, KeysView, List, + Literal, Optional, Tuple, Union, @@ -19,7 +20,6 @@ import pyarrow as pa from jinja2 import BaseLoader, Environment from psycopg2 import sql -from pydantic.typing import Literal from pytz import utc from feast.data_source import DataSource diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index 224fcea30f..46d5c20e97 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -82,10 +82,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - suffix: Optional[str] = None, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py index 7b4fda3b5f..b978521885 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py +++ 
b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py @@ -9,6 +9,7 @@ from pyspark.sql import SparkSession from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.contrib.spark_offline_store.spark import ( SparkOfflineStoreConfig, ) @@ -68,10 +69,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, - **kwargs, + timestamp_field: Optional[str] = "ts", ) -> DataSource: if timestamp_field in df: df[timestamp_field] = pd.to_datetime(df[timestamp_field], utc=True) @@ -119,3 +120,7 @@ def create_saved_dataset_destination(self) -> SavedDatasetSparkStorage: def get_prefixed_table_name(self, suffix: str) -> str: return f"{self.project_name}_{suffix}" + + def create_logged_features_destination(self) -> LoggingDestination: + # No implementation of LoggingDestination for Spark offline store. 
+ return None # type: ignore diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py index 7d31aa90fb..a31d368ea1 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py @@ -8,6 +8,6 @@ FULL_REPO_CONFIGS = [ IntegrationTestRepoConfig( provider="local", - offline_store_creator=TrinoSourceCreator, + offline_store_creator=TrinoSourceCreator, # type: ignore ), ] diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index a5aa53df7a..fcc0c8d0fa 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -81,10 +81,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - suffix: Optional[str] = None, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) self.client.execute_query( @@ -128,4 +128,6 @@ def create_offline_store_config(self) -> FeastConfigBaseModel: catalog="memory", dataset=self.project_name, connector={"type": "memory"}, + user="test", + auth=None, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index d4cfdb6632..cdc9435024 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ 
b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -5,7 +5,7 @@ import numpy as np import pandas as pd import pyarrow -from pydantic import Field, FilePath, SecretStr, StrictBool, StrictStr, root_validator +from pydantic import Field, FilePath, SecretStr, StrictBool, StrictStr, model_validator from trino.auth import ( BasicAuthentication, CertificateAuthentication, @@ -98,14 +98,14 @@ class AuthConfig(FeastConfigBaseModel): type: Literal["kerberos", "basic", "jwt", "oauth2", "certificate"] config: Optional[Dict[StrictStr, Any]] - @root_validator - def config_only_nullable_for_oauth2(cls, values): - auth_type = values["type"] - auth_config = values["config"] + @model_validator(mode="after") + def config_only_nullable_for_oauth2(self): + auth_type = self.type + auth_config = self.config if auth_type != "oauth2" and auth_config is None: raise ValueError(f"config cannot be null for auth type '{auth_type}'") - return values + return self def to_trino_auth(self): auth_type = self.type diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 5e4107545f..0e5064ba78 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime from pathlib import Path -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, Literal, Optional, Tuple, Union import dask.dataframe as dd import pandas as pd @@ -10,7 +10,6 @@ import pyarrow.dataset import pyarrow.parquet import pytz -from pydantic.typing import Literal from feast.data_source import DataSource from feast.errors import ( diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 6034bf5ac7..2565a569ad 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -9,6 +9,7 @@ 
Dict, Iterator, List, + Literal, Optional, Tuple, Union, @@ -19,8 +20,7 @@ import pyarrow import pyarrow as pa from dateutil import parser -from pydantic import StrictStr, root_validator -from pydantic.typing import Literal +from pydantic import StrictStr, model_validator from pytz import utc from feast import OnDemandFeatureView, RedshiftSource @@ -72,16 +72,16 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): iam_role: StrictStr """ IAM Role for Redshift, granting it access to S3 """ - @root_validator - def require_cluster_and_user_or_workgroup(cls, values): + @model_validator(mode="after") + def require_cluster_and_user_or_workgroup(self): """ Provisioned Redshift clusters: Require cluster_id and user, ignore workgroup Serverless Redshift: Require workgroup, ignore cluster_id and user """ cluster_id, user, workgroup = ( - values.get("cluster_id"), - values.get("user"), - values.get("workgroup"), + self.cluster_id, + self.user, + self.workgroup, ) if not (cluster_id and user) and not workgroup: raise ValueError( @@ -90,7 +90,7 @@ def require_cluster_and_user_or_workgroup(cls, values): elif cluster_id and workgroup: raise ValueError("cannot specify both cluster_id and workgroup") - return values + return self class RedshiftOfflineStore(OfflineStore): diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index dd13ffc96c..66e7e78651 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -14,6 +14,7 @@ Dict, Iterator, List, + Literal, Optional, Tuple, Union, @@ -23,8 +24,7 @@ import numpy as np import pandas as pd import pyarrow -from pydantic import Field, StrictStr -from pydantic.typing import Literal +from pydantic import ConfigDict, Field, StrictStr from pytz import utc from feast import OnDemandFeatureView @@ -119,9 +119,7 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): convert_timestamp_columns: Optional[bool] 
= None """ Convert timestamp columns on export to a Parquet-supported format """ - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) class SnowflakeOfflineStore(OfflineStore): diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index e29197c68d..9a2c6e09bc 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,5 +1,5 @@ import warnings -from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, no_type_check from typeguard import typechecked @@ -202,6 +202,7 @@ def get_table_query_string(self) -> str: def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: return type_map.snowflake_type_to_feast_value_type + @no_type_check def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: @@ -292,7 +293,8 @@ def get_table_column_names_and_types( ) return [ - (column["column_name"], column["snowflake_type"]) for column in metadata + (str(column["column_name"]), str(column["snowflake_type"])) + for column in metadata ] diff --git a/sdk/python/feast/infra/online_stores/bigtable.py b/sdk/python/feast/infra/online_stores/bigtable.py index 30561d0840..3a83d23ced 100644 --- a/sdk/python/feast/infra/online_stores/bigtable.py +++ b/sdk/python/feast/infra/online_stores/bigtable.py @@ -2,13 +2,12 @@ import logging from concurrent import futures from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Set, Tuple import google from google.cloud import bigtable from google.cloud.bigtable import row_filters from pydantic import StrictStr -from pydantic.typing import Literal from feast import Entity, 
FeatureView, utils from feast.feature_view import DUMMY_ENTITY_NAME diff --git a/sdk/python/feast/infra/online_stores/contrib/cassandra_online_store/cassandra_online_store.py b/sdk/python/feast/infra/online_stores/contrib/cassandra_online_store/cassandra_online_store.py index 34a8cab036..c672e18db0 100644 --- a/sdk/python/feast/infra/online_stores/contrib/cassandra_online_store/cassandra_online_store.py +++ b/sdk/python/feast/infra/online_stores/contrib/cassandra_online_store/cassandra_online_store.py @@ -20,7 +20,17 @@ import logging from datetime import datetime -from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Literal, + Optional, + Sequence, + Tuple, +) from cassandra.auth import PlainTextAuthProvider from cassandra.cluster import ( @@ -34,7 +44,6 @@ from cassandra.policies import DCAwareRoundRobinPolicy, TokenAwarePolicy from cassandra.query import PreparedStatement from pydantic import StrictFloat, StrictInt, StrictStr -from pydantic.typing import Literal from feast import Entity, FeatureView, RepoConfig from feast.infra.key_encoding_utils import serialize_entity_key diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py index 1da9de89a8..4b2d8ae39c 100644 --- a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py +++ b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py @@ -1,12 +1,11 @@ import calendar import struct from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple from happybase import ConnectionPool from happybase.connection import DEFAULT_PROTOCOL, DEFAULT_TRANSPORT from pydantic import StrictStr -from pydantic.typing import Literal from feast import Entity from 
feast.feature_view import FeatureView diff --git a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py index c09cb126f0..cf07d5fef1 100644 --- a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py +++ b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py @@ -1,7 +1,7 @@ from __future__ import absolute_import from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple import pymysql import pytz @@ -23,7 +23,7 @@ class MySQLOnlineStoreConfig(FeastConfigBaseModel): NOTE: The class *must* end with the `OnlineStoreConfig` suffix. """ - type = "mysql" + type: Literal["mysql"] = "mysql" host: Optional[StrictStr] = None user: Optional[StrictStr] = None diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index 49f87ddb0a..308528aaec 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -2,14 +2,13 @@ import logging from collections import defaultdict from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple import psycopg2 import pytz from psycopg2 import sql from psycopg2.extras import execute_values from psycopg2.pool import SimpleConnectionPool -from pydantic.schema import Literal from feast import Entity from feast.feature_view import FeatureView diff --git a/sdk/python/feast/infra/online_stores/datastore.py b/sdk/python/feast/infra/online_stores/datastore.py index ed4e7612ba..ae96e16c64 100644 --- a/sdk/python/feast/infra/online_stores/datastore.py +++ b/sdk/python/feast/infra/online_stores/datastore.py @@ -17,10 +17,19 @@ from 
multiprocessing.pool import ThreadPool from queue import Empty, Queue from threading import Lock, Thread -from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Literal, + Optional, + Sequence, + Tuple, +) from pydantic import PositiveInt, StrictStr -from pydantic.typing import Literal from feast import Entity, utils from feast.errors import FeastProviderLoginError diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index a1eef16f40..a049189de7 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -14,10 +14,9 @@ import itertools import logging from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union from pydantic import StrictBool, StrictStr -from pydantic.typing import Literal, Union from feast import Entity, FeatureView, utils from feast.infra.infra_object import DYNAMODB_INFRA_OBJECT_CLASS_TYPE, InfraObject diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index 9561705aaa..ad84e8db7c 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -21,6 +21,7 @@ Callable, Dict, List, + Literal, Optional, Sequence, Tuple, @@ -30,7 +31,6 @@ import pytz from google.protobuf.timestamp_pb2 import Timestamp from pydantic import StrictStr -from pydantic.typing import Literal from feast import Entity, FeatureView, RepoConfig, utils from feast.infra.online_stores.helpers import _mmh3, _redis_key, _redis_key_prefix diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py index c1a03a2862..f5600249c9 100644 --- 
a/sdk/python/feast/infra/online_stores/snowflake.py +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -2,11 +2,10 @@ import os from binascii import hexlify from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple import pandas as pd -from pydantic import Field, StrictStr -from pydantic.schema import Literal +from pydantic import ConfigDict, Field, StrictStr from feast.entity import Entity from feast.feature_view import FeatureView @@ -57,9 +56,7 @@ class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): schema_: Optional[str] = Field("PUBLIC", alias="schema") """ Snowflake schema name """ - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) class SnowflakeOnlineStore(OnlineStore): diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 6949b2bf24..4a6aa28889 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -16,10 +16,9 @@ import sqlite3 from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple from pydantic import StrictStr -from pydantic.schema import Literal from feast import Entity from feast.feature_view import FeatureView diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 811abe106c..aca18f4856 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -70,7 +70,7 @@ def batch_engine(self) -> BatchMaterializationEngine: if self._batch_engine: return self._batch_engine else: - engine_config = self.repo_config._batch_engine_config + engine_config = 
self.repo_config.batch_engine_config config_is_dict = False if isinstance(engine_config, str): engine_config_type = engine_config diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index f89b079478..f23a820d23 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -51,6 +51,7 @@ def apply_entity(self, entity: Entity, project: str, commit: bool = True): project: Feast project that this entity belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def delete_entity(self, name: str, project: str, commit: bool = True): @@ -62,6 +63,7 @@ def delete_entity(self, name: str, project: str, commit: bool = True): project: Feast project that this entity belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: @@ -77,6 +79,7 @@ def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Enti Returns either the specified entity, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]: @@ -90,6 +93,7 @@ def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity] Returns: List of entities """ + raise NotImplementedError # Data source operations @abstractmethod @@ -104,6 +108,7 @@ def apply_data_source( project: Feast project that this data source belongs to commit: Whether to immediately commit to the registry """ + raise NotImplementedError @abstractmethod def delete_data_source(self, name: str, project: str, commit: bool = True): @@ -115,6 +120,7 @@ def delete_data_source(self, name: str, project: str, commit: bool = True): project: Feast project that this data source belongs to commit: Whether the 
change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_data_source( @@ -131,6 +137,7 @@ def get_data_source( Returns: Returns either the specified data source, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_data_sources( @@ -146,6 +153,7 @@ def list_data_sources( Returns: List of data sources """ + raise NotImplementedError # Feature service operations @abstractmethod @@ -159,6 +167,7 @@ def apply_feature_service( feature_service: A feature service that will be registered project: Feast project that this entity belongs to """ + raise NotImplementedError @abstractmethod def delete_feature_service(self, name: str, project: str, commit: bool = True): @@ -170,6 +179,7 @@ def delete_feature_service(self, name: str, project: str, commit: bool = True): project: Feast project that this feature service belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_feature_service( @@ -187,6 +197,7 @@ def get_feature_service( Returns either the specified feature service, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_feature_services( @@ -202,6 +213,7 @@ def list_feature_services( Returns: List of feature services """ + raise NotImplementedError # Feature view operations @abstractmethod @@ -216,6 +228,7 @@ def apply_feature_view( project: Feast project that this feature view belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def delete_feature_view(self, name: str, project: str, commit: bool = True): @@ -227,6 +240,7 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): project: Feast project that this feature view belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError # stream feature view operations @abstractmethod @@ -245,6 +259,7 @@ 
def get_stream_feature_view( Returns either the specified feature view, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_stream_feature_views( @@ -260,6 +275,7 @@ def list_stream_feature_views( Returns: List of stream feature views """ + raise NotImplementedError # on demand feature view operations @abstractmethod @@ -278,6 +294,7 @@ def get_on_demand_feature_view( Returns either the specified on demand feature view, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_on_demand_feature_views( @@ -293,6 +310,7 @@ def list_on_demand_feature_views( Returns: List of on demand feature views """ + raise NotImplementedError # regular feature view operations @abstractmethod @@ -311,6 +329,7 @@ def get_feature_view( Returns either the specified feature view, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_feature_views( @@ -326,6 +345,7 @@ def list_feature_views( Returns: List of feature views """ + raise NotImplementedError # request feature view operations @abstractmethod @@ -344,6 +364,7 @@ def get_request_feature_view( Returns either the specified feature view, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_request_feature_views( @@ -359,6 +380,7 @@ def list_request_feature_views( Returns: List of request feature views """ + raise NotImplementedError @abstractmethod def apply_materialization( @@ -379,6 +401,7 @@ def apply_materialization( end_date (datetime): End date of the materialization interval to track commit: Whether the change should be persisted immediately """ + raise NotImplementedError # Saved dataset operations @abstractmethod @@ -396,6 +419,7 @@ def apply_saved_dataset( project: Feast project that this dataset belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_saved_dataset( @@ -413,6 +437,7 @@ 
def get_saved_dataset( Returns either the specified SavedDataset, or raises an exception if none is found """ + raise NotImplementedError def delete_saved_dataset(self, name: str, project: str, allow_cache: bool = False): """ @@ -427,6 +452,7 @@ def delete_saved_dataset(self, name: str, project: str, allow_cache: bool = Fals Returns either the specified SavedDataset, or raises an exception if none is found """ + raise NotImplementedError @abstractmethod def list_saved_datasets( @@ -442,6 +468,7 @@ def list_saved_datasets( Returns: Returns the list of SavedDatasets """ + raise NotImplementedError # Validation reference operations @abstractmethod @@ -459,6 +486,7 @@ def apply_validation_reference( project: Feast project that this dataset belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def delete_validation_reference(self, name: str, project: str, commit: bool = True): @@ -470,6 +498,7 @@ def delete_validation_reference(self, name: str, project: str, commit: bool = Tr project: Feast project that this object belongs to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_validation_reference( @@ -487,6 +516,7 @@ def get_validation_reference( Returns either the specified ValidationReference, or raises an exception if none is found """ + raise NotImplementedError # TODO: Needs to be implemented. 
def list_validation_references( @@ -519,6 +549,7 @@ def list_project_metadata( Returns: List of project metadata """ + raise NotImplementedError @abstractmethod def update_infra(self, infra: Infra, project: str, commit: bool = True): @@ -530,6 +561,7 @@ def update_infra(self, infra: Infra, project: str, commit: bool = True): project: Feast project that the Infra object refers to commit: Whether the change should be persisted immediately """ + raise NotImplementedError @abstractmethod def get_infra(self, project: str, allow_cache: bool = False) -> Infra: @@ -543,6 +575,7 @@ def get_infra(self, project: str, allow_cache: bool = False) -> Infra: Returns: The stored Infra object. """ + raise NotImplementedError @abstractmethod def apply_user_metadata( @@ -567,14 +600,17 @@ def proto(self) -> RegistryProto: Returns: The registry proto object. """ + raise NotImplementedError @abstractmethod def commit(self): """Commits the state of the registry cache to the remote registry store.""" + raise NotImplementedError @abstractmethod def refresh(self, project: Optional[str] = None): """Refreshes the state of the registry cache by fetching the registry state from the remote registry store.""" + raise NotImplementedError @staticmethod def _message_to_sorted_dict(message: Message) -> Dict[str, Any]: diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index c1ebf13d6b..cdf79c78b5 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -5,10 +5,9 @@ from datetime import datetime, timedelta from enum import Enum from threading import Lock -from typing import Any, Callable, List, Optional, Set, Union +from typing import Any, Callable, List, Literal, Optional, Set, Union -from pydantic import Field, StrictStr -from pydantic.schema import Literal +from pydantic import ConfigDict, Field, StrictStr import feast from feast import usage @@ -103,9 +102,7 @@ class 
SnowflakeRegistryConfig(RegistryConfig): schema_: Optional[str] = Field("PUBLIC", alias="schema") """ Snowflake schema name """ - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) class SnowflakeRegistry(BaseRegistry): diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index 8eb5177ac2..8548e4dbd8 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -43,11 +43,7 @@ class GetSnowflakeConnection: - def __init__( - self, - config: str, - autocommit=True, - ): + def __init__(self, config: Any, autocommit=True): self.config = config self.autocommit = autocommit diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 3461ae058b..c69bb4d1e7 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -2,20 +2,19 @@ import os import warnings from pathlib import Path -from typing import Any +from typing import Any, Dict, Optional import yaml from pydantic import ( BaseModel, + ConfigDict, Field, StrictInt, StrictStr, ValidationError, - root_validator, - validator, + field_validator, + model_validator, ) -from pydantic.error_wrappers import ErrorWrapper -from pydantic.typing import Dict, Optional from feast.errors import ( FeastFeatureServerTypeInvalidError, @@ -93,17 +92,13 @@ class FeastBaseModel(BaseModel): """Feast Pydantic Configuration Class""" - class Config: - arbitrary_types_allowed = True - extra = "allow" + model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow") class FeastConfigBaseModel(BaseModel): """Feast Pydantic Configuration Class""" - class Config: - arbitrary_types_allowed = True - extra = "forbid" + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") class RegistryConfig(FeastBaseModel): @@ -112,7 +107,7 @@ class RegistryConfig(FeastBaseModel): 
registry_type: StrictStr = "file" """ str: Provider name or a class name that implements Registry.""" - registry_store_type: Optional[StrictStr] + registry_store_type: Optional[StrictStr] = None """ str: Provider name or a class name that implements RegistryStore. """ path: StrictStr = "" @@ -126,7 +121,7 @@ class RegistryConfig(FeastBaseModel): set to infinity by setting TTL to 0 seconds, which means the cache will only be loaded once and will never expire. Users can manually refresh the cache by calling feature_store.refresh_registry() """ - s3_additional_kwargs: Optional[Dict[str, str]] + s3_additional_kwargs: Optional[Dict[str, str]] = None """ Dict[str, str]: Extra arguments to pass to boto3 when writing the registry file to S3. """ @@ -142,7 +137,7 @@ class RepoConfig(FeastBaseModel): provider: StrictStr """ str: local or gcp or aws """ - _registry_config: Any = Field(alias="registry", default="data/registry.db") + registry_config: Any = Field(alias="registry", default="data/registry.db") """ Configures the registry. Can be: 1. str: a path to a file based registry (a local path, or remote object storage path, e.g. a GCS URI) @@ -150,19 +145,19 @@ class RepoConfig(FeastBaseModel): 3. 
SnowflakeRegistryConfig: Using a Snowflake table to store the registry """ - _online_config: Any = Field(alias="online_store") + online_config: Any = Field(None, alias="online_store") """ OnlineStoreConfig: Online store configuration (optional depending on provider) """ - _offline_config: Any = Field(alias="offline_store") + offline_config: Any = Field(None, alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ - _batch_engine_config: Any = Field(alias="batch_engine") + batch_engine_config: Any = Field(None, alias="batch_engine") """ BatchMaterializationEngine: Batch materialization configuration (optional depending on provider)""" - feature_server: Optional[Any] + feature_server: Optional[Any] = None """ FeatureServerConfig: Feature server configuration (optional depending on provider) """ - flags: Any + flags: Any = None """ Flags (deprecated field): Feature flags for experimental features """ repo_path: Optional[Path] = None @@ -187,42 +182,42 @@ def __init__(self, **data: Any): self._registry = None if "registry" not in data: raise FeastRegistryNotSetError() - self._registry_config = data["registry"] + self.registry_config = data["registry"] self._offline_store = None if "offline_store" in data: - self._offline_config = data["offline_store"] + self.offline_config = data["offline_store"] else: if data["provider"] == "local": - self._offline_config = "file" + self.offline_config = "file" elif data["provider"] == "gcp": - self._offline_config = "bigquery" + self.offline_config = "bigquery" elif data["provider"] == "aws": - self._offline_config = "redshift" + self.offline_config = "redshift" elif data["provider"] == "azure": - self._offline_config = "mssql" + self.offline_config = "mssql" self._online_store = None if "online_store" in data: - self._online_config = data["online_store"] + self.online_config = data["online_store"] else: if data["provider"] == "local": - self._online_config = "sqlite" + 
self.online_config = "sqlite" elif data["provider"] == "gcp": - self._online_config = "datastore" + self.online_config = "datastore" elif data["provider"] == "aws": - self._online_config = "dynamodb" + self.online_config = "dynamodb" elif data["provider"] == "rockset": - self._online_config = "rockset" + self.online_config = "rockset" self._batch_engine = None if "batch_engine" in data: - self._batch_engine_config = data["batch_engine"] + self.batch_engine_config = data["batch_engine"] elif "batch_engine_config" in data: - self._batch_engine_config = data["batch_engine_config"] + self.batch_engine_config = data["batch_engine_config"] else: # Defaults to using local in-process materialization engine. - self._batch_engine_config = "local" + self.batch_engine_config = "local" if isinstance(self.feature_server, Dict): self.feature_server = get_feature_server_config_from_type( @@ -242,71 +237,71 @@ def __init__(self, **data: Any): @property def registry(self): if not self._registry: - if isinstance(self._registry_config, Dict): - if "registry_type" in self._registry_config: + if isinstance(self.registry_config, Dict): + if "registry_type" in self.registry_config: self._registry = get_registry_config_from_type( - self._registry_config["registry_type"] - )(**self._registry_config) + self.registry_config["registry_type"] + )(**self.registry_config) else: # This may be a custom registry store, which does not need a 'registry_type' - self._registry = RegistryConfig(**self._registry_config) - elif isinstance(self._registry_config, str): + self._registry = RegistryConfig(**self.registry_config) + elif isinstance(self.registry_config, str): # User passed in just a path to file registry self._registry = get_registry_config_from_type("file")( - path=self._registry_config + path=self.registry_config ) - elif self._registry_config: - self._registry = self._registry_config + elif self.registry_config: + self._registry = self.registry_config return self._registry @property def 
offline_store(self): if not self._offline_store: - if isinstance(self._offline_config, Dict): + if isinstance(self.offline_config, Dict): self._offline_store = get_offline_config_from_type( - self._offline_config["type"] - )(**self._offline_config) - elif isinstance(self._offline_config, str): + self.offline_config["type"] + )(**self.offline_config) + elif isinstance(self.offline_config, str): self._offline_store = get_offline_config_from_type( - self._offline_config + self.offline_config )() - elif self._offline_config: - self._offline_store = self._offline_config + elif self.offline_config: + self._offline_store = self.offline_config return self._offline_store @property def online_store(self): if not self._online_store: - if isinstance(self._online_config, Dict): + if isinstance(self.online_config, Dict): self._online_store = get_online_config_from_type( - self._online_config["type"] - )(**self._online_config) - elif isinstance(self._online_config, str): - self._online_store = get_online_config_from_type(self._online_config)() - elif self._online_config: - self._online_store = self._online_config + self.online_config["type"] + )(**self.online_config) + elif isinstance(self.online_config, str): + self._online_store = get_online_config_from_type(self.online_config)() + elif self.online_config: + self._online_store = self.online_config return self._online_store @property def batch_engine(self): if not self._batch_engine: - if isinstance(self._batch_engine_config, Dict): + if isinstance(self.batch_engine_config, Dict): self._batch_engine = get_batch_engine_config_from_type( - self._batch_engine_config["type"] - )(**self._batch_engine_config) - elif isinstance(self._batch_engine_config, str): + self.batch_engine_config["type"] + )(**self.batch_engine_config) + elif isinstance(self.batch_engine_config, str): self._batch_engine = get_batch_engine_config_from_type( - self._batch_engine_config + self.batch_engine_config )() - elif self._batch_engine_config: + elif 
self.batch_engine_config: self._batch_engine = self._batch_engine return self._batch_engine - @root_validator(pre=True) + @model_validator(mode="before") @log_exceptions - def _validate_online_store_config(cls, values): + def _validate_online_store_config(cls, values: Any) -> Any: # This method will validate whether the online store configurations are set correctly. This explicit validation # is necessary because Pydantic Unions throw very verbose and cryptic exceptions. We also use this method to # impute the default online store type based on the selected provider. For the time being this method should be @@ -347,14 +342,12 @@ def _validate_online_store_config(cls, values): online_config_class = get_online_config_from_type(online_store_type) online_config_class(**values["online_store"]) except ValidationError as e: - raise ValidationError( - [ErrorWrapper(e, loc="online_store")], - model=RepoConfig, - ) + raise e return values - @root_validator(pre=True) - def _validate_offline_store_config(cls, values): + @model_validator(mode="before") + @classmethod + def _validate_offline_store_config(cls, values: Any) -> Any: # Set empty offline_store config if it isn't set explicitly if "offline_store" not in values: values["offline_store"] = dict() @@ -385,15 +378,13 @@ def _validate_offline_store_config(cls, values): offline_config_class = get_offline_config_from_type(offline_store_type) offline_config_class(**values["offline_store"]) except ValidationError as e: - raise ValidationError( - [ErrorWrapper(e, loc="offline_store")], - model=RepoConfig, - ) + raise e return values - @root_validator(pre=True) - def _validate_feature_server_config(cls, values): + @model_validator(mode="before") + @classmethod + def _validate_feature_server_config(cls, values: Any) -> Any: # Having no feature server is the default. 
if "feature_server" not in values: return values @@ -420,15 +411,13 @@ def _validate_feature_server_config(cls, values): ) feature_server_config_class(**values["feature_server"]) except ValidationError as e: - raise ValidationError( - [ErrorWrapper(e, loc="feature_server")], - model=RepoConfig, - ) + raise e return values - @validator("project") - def _validate_project_name(cls, v): + @field_validator("project") + @classmethod + def _validate_project_name(cls, v: str) -> str: from feast.repo_operations import is_valid_name if not is_valid_name(v): @@ -438,10 +427,11 @@ def _validate_project_name(cls, v): ) return v - @validator("flags") - def _validate_flags(cls, v): - if not isinstance(v, Dict): - return + @field_validator("flags") + @classmethod + def _validate_flags(cls, v: Optional[dict]) -> Optional[dict]: + if not isinstance(v, dict): + return v _logger.warning( "Flags are no longer necessary in Feast. Experimental features will log warnings instead." @@ -463,8 +453,7 @@ def write_to_path(self, repo_path: Path): sort_keys=False, ) - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) class FeastConfigError(Exception): diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index ffb4662eb1..34d0b0c284 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -4,11 +4,12 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt # - alabaster==0.7.16 # via sphinx -altair==4.2.0 +altair==4.2.2 # via great-expectations +annotated-types==0.6.0 + # via pydantic anyio==4.2.0 # via # httpx @@ -225,6 +226,10 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 + # via + # feast (setup.py) + # google-cloud-bigquery +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) 
google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) @@ -259,7 +264,7 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.15.50 +great-expectations==0.18.8 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -326,7 +331,6 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) - # great-expectations importlib-resources==6.1.1 # via feast (setup.py) iniconfig==2.0.0 @@ -630,11 +634,13 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) # great-expectations +pydantic-core==2.16.2 + # via pydantic pyflakes==3.0.1 # via flake8 pygments==2.17.2 @@ -927,6 +933,7 @@ typing-extensions==4.9.0 # great-expectations # mypy # pydantic + # pydantic-core # snowflake-connector-python # sqlalchemy2-stubs # uvicorn diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index d38a287d72..ba474f6120 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -4,7 +4,8 @@ # # pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt # - +annotated-types==0.6.0 + # via pydantic anyio==4.2.0 # via # httpx @@ -140,10 +141,12 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) +pydantic-core==2.16.2 + # via pydantic pygments==2.17.2 # via feast (setup.py) python-dateutil==2.8.2 @@ -176,7 +179,9 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -205,12 +210,15 @@ typing-extensions==4.9.0 # fastapi # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # uvicorn urllib3==2.2.0 # via requests uvicorn[standard]==0.27.1 - # via feast (setup.py) + # via + # feast 
(setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 33dd89c362..bf8f4fbc42 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -7,8 +7,10 @@ alabaster==0.7.13 # via sphinx -altair==4.2.0 +altair==4.2.2 # via great-expectations +annotated-types==0.6.0 + # via pydantic anyio==4.2.0 # via # httpx @@ -230,6 +232,10 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 + # via + # feast (setup.py) + # google-cloud-bigquery +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) @@ -264,7 +270,7 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.15.50 +great-expectations==0.18.8 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -332,7 +338,6 @@ importlib-metadata==6.11.0 # build # dask # feast (setup.py) - # great-expectations # jupyter-client # jupyter-lsp # jupyterlab @@ -650,11 +655,13 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) # great-expectations +pydantic-core==2.16.2 + # via pydantic pyflakes==3.0.1 # via flake8 pygments==2.17.2 @@ -952,6 +959,7 @@ typing-extensions==4.9.0 # ipython # mypy # pydantic + # pydantic-core # snowflake-connector-python # sqlalchemy2-stubs # starlette diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 388bb3143f..5e8481e770 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -4,7 +4,8 @@ # # pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt # - +annotated-types==0.6.0 + # via pydantic 
anyio==4.2.0 # via # httpx @@ -145,10 +146,12 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) +pydantic-core==2.16.2 + # via pydantic pygments==2.17.2 # via feast (setup.py) python-dateutil==2.8.2 @@ -210,13 +213,16 @@ typing-extensions==4.9.0 # fastapi # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # starlette # uvicorn urllib3==2.2.0 # via requests uvicorn[standard]==0.27.1 - # via feast (setup.py) + # via + # feast (setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 9cb322d2f6..670ba1c07d 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -4,11 +4,12 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.9-ci-requirements.txt # - alabaster==0.7.16 # via sphinx -altair==4.2.0 +altair==4.2.2 # via great-expectations +annotated-types==0.6.0 + # via pydantic anyio==4.2.0 # via # httpx @@ -225,6 +226,10 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 + # via + # feast (setup.py) + # google-cloud-bigquery +google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) @@ -259,7 +264,7 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.15.50 +great-expectations==0.18.8 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -327,7 +332,6 @@ importlib-metadata==6.11.0 # build # dask # feast (setup.py) - # great-expectations # jupyter-client # jupyter-lsp # jupyterlab @@ -637,11 +641,13 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) # great-expectations 
+pydantic-core==2.16.2 + # via pydantic pyflakes==3.0.1 # via flake8 pygments==2.17.2 @@ -938,6 +944,7 @@ typing-extensions==4.9.0 # ipython # mypy # pydantic + # pydantic-core # snowflake-connector-python # sqlalchemy2-stubs # starlette diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 012dac6f81..2815ed0d78 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -4,7 +4,8 @@ # # pip-compile --output-file=sdk/python/requirements/py3.9-requirements.txt # - +annotated-types==0.6.0 + # via pydantic anyio==4.2.0 # via # httpx @@ -140,10 +141,12 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==1.10.14 +pydantic==2.6.1 # via # fastapi # feast (setup.py) +pydantic-core==2.16.2 + # via pydantic pygments==2.17.2 # via feast (setup.py) python-dateutil==2.8.2 @@ -205,6 +208,7 @@ typing-extensions==4.9.0 # fastapi # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # starlette # uvicorn diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 728bd9b34f..743a1ce4a0 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -18,7 +18,7 @@ from datetime import datetime, timedelta from multiprocessing import Process from sys import platform -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Tuple, no_type_check import pandas as pd import pytest @@ -187,9 +187,10 @@ def environment(request, worker_id): e.online_store_creator.teardown() -_config_cache = {} +_config_cache: Any = {} +@no_type_check def pytest_generate_tests(metafunc: pytest.Metafunc): """ This function receives each test function (wrapped in Metafunc) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 027dea2c58..f745bafa13 100644 --- 
a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -99,7 +99,7 @@ "host": os.getenv("ROCKSET_APISERVER", "api.rs2.usw2.rockset.com"), } -OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, DataSourceCreator] = { +OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, Tuple[str, Type[DataSourceCreator]]] = { "file": ("local", FileDataSourceCreator), "bigquery": ("gcp", BigQueryDataSourceCreator), "redshift": ("aws", RedshiftDataSourceCreator), @@ -111,7 +111,7 @@ ] AVAILABLE_ONLINE_STORES: Dict[ - str, Tuple[Union[str, Dict[str, str]], Optional[Type[OnlineStoreCreator]]] + str, Tuple[Union[str, Dict[Any, Any]], Optional[Type[OnlineStoreCreator]]] ] = { "sqlite": ({"type": "sqlite"}, None), } @@ -169,7 +169,7 @@ AVAILABLE_ONLINE_STORES = { c.online_store["type"] if isinstance(c.online_store, dict) - else c.online_store: (c.online_store, c.online_store_creator) + else c.online_store: (c.online_store, c.online_store_creator) # type: ignore for c in FULL_REPO_CONFIGS } @@ -328,7 +328,7 @@ class UniversalFeatureViews: customer: FeatureView global_fv: FeatureView driver: FeatureView - driver_odfv: OnDemandFeatureView + driver_odfv: Optional[OnDemandFeatureView] order: FeatureView location: FeatureView field_mapping: FeatureView @@ -410,9 +410,7 @@ def construct_test_environment( online_creator = test_repo_config.online_store_creator( project, fixture_request=fixture_request ) - online_store = ( - test_repo_config.online_store - ) = online_creator.create_online_store() + online_store = online_creator.create_online_store() else: online_creator = None online_store = test_repo_config.online_store @@ -422,7 +420,7 @@ def construct_test_environment( AwsLambdaFeatureServerConfig, ) - feature_server = AwsLambdaFeatureServerConfig( + feature_server: Any = AwsLambdaFeatureServerConfig( enabled=True, execution_role_name=os.getenv( "AWS_LAMBDA_ROLE", @@ -465,7 +463,7 @@ def construct_test_environment( # Create 
feature_store.yaml out of the config with open(Path(repo_dir_name) / "feature_store.yaml", "w") as f: - yaml.safe_dump(json.loads(config.json()), f) + yaml.safe_dump(json.loads(config.model_dump_json(by_alias=True)), f) fs = FeatureStore(repo_dir_name) # We need to initialize the registry, because if nothing is applied in the test before tearing down diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index d64463606f..5e5062291d 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -42,19 +42,20 @@ def create_data_source( A Data source object, pointing to a table or file that is uploaded/persisted for the purpose of the test. """ - ... + raise NotImplementedError @abstractmethod def create_offline_store_config(self) -> FeastConfigBaseModel: - ... + raise NotImplementedError @abstractmethod def create_saved_dataset_destination(self) -> SavedDatasetStorage: - ... + raise NotImplementedError + @abstractmethod def create_logged_features_destination(self) -> LoggingDestination: raise NotImplementedError @abstractmethod def teardown(self): - ... 
+ raise NotImplementedError diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 215d19ba7f..066497a0bc 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -64,10 +64,9 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - timestamp_field="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, - **kwargs, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 3263785683..008bb8d881 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -39,9 +39,9 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - timestamp_field="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -94,9 +94,9 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - timestamp_field="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -167,11 +167,10 @@ def _upload_parquet_file(self, df, file_name, minio_endpoint): def create_data_source( self, df: pd.DataFrame, - destination_name: Optional[str] = None, - suffix: Optional[str] = None, - timestamp_field="ts", + destination_name: 
str, created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: filename = f"{destination_name}.parquet" port = self.minio.get_exposed_port("9000") diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index e6f20d6125..5a4e3f1085 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -42,16 +42,17 @@ def __init__(self, project_name: str, *args, **kwargs): iam_role=os.getenv( "AWS_IAM_ROLE", "arn:aws:iam::402087665549:role/redshift_s3_access_role" ), + workgroup="", ) def create_data_source( self, df: pd.DataFrame, destination_name: str, - suffix: Optional[str] = None, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index 1414291a18..1481b11a10 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -48,10 +48,10 @@ def create_data_source( self, df: pd.DataFrame, destination_name: str, - suffix: Optional[str] = None, - timestamp_field="ts", + event_timestamp_column="ts", created_timestamp_column="created_ts", field_mapping: Optional[Dict[str, str]] = None, + timestamp_field: Optional[str] = "ts", ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git 
a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 5938a0c936..9bb8aae77f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -14,6 +14,7 @@ StreamFeatureView, ) from feast.data_source import DataSource, RequestSource +from feast.feature_view_projection import FeatureViewProjection from feast.types import Array, FeastType, Float32, Float64, Int32, Int64 from tests.integration.feature_repos.universal.entities import ( customer, @@ -55,7 +56,7 @@ def conv_rate_plus_100(features_df: pd.DataFrame) -> pd.DataFrame: def conv_rate_plus_100_feature_view( - sources: Dict[str, Union[RequestSource, FeatureView]], + sources: List[Union[FeatureView, RequestSource, FeatureViewProjection]], infer_features: bool = False, features: Optional[List[Field]] = None, ) -> OnDemandFeatureView: diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py index 10a8143739..4932001e76 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py @@ -1,4 +1,4 @@ -from abc import ABC +from abc import ABC, abstractmethod from feast.repo_config import FeastConfigBaseModel @@ -10,5 +10,6 @@ def __init__(self, project_name: str, **kwargs): def create_online_store(self) -> FeastConfigBaseModel: raise NotImplementedError + @abstractmethod def teardown(self): raise NotImplementedError diff --git a/sdk/python/tests/unit/cli/test_cli_chdir.py b/sdk/python/tests/unit/cli/test_cli_chdir.py index cf1d031227..12ca8f6b08 100644 --- a/sdk/python/tests/unit/cli/test_cli_chdir.py +++ b/sdk/python/tests/unit/cli/test_cli_chdir.py @@ -15,7 +15,7 @@ def test_cli_chdir() -> None: # Make sure 
the path is absolute by resolving any symlinks temp_path = Path(temp_dir).resolve() result = runner.run(["init", "my_project"], cwd=temp_path) - repo_path = temp_path / "my_project" / "feature_repo" + repo_path = str(temp_path / "my_project" / "feature_repo") assert result.returncode == 0 result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_path) @@ -44,7 +44,12 @@ def test_cli_chdir() -> None: assert result.returncode == 0 result = runner.run( - ["--chdir", repo_path, "materialize-incremental", end_date.isoformat()], + [ + "--chdir", + repo_path, + "materialize-incremental", + end_date.isoformat(), + ], cwd=temp_path, ) assert result.returncode == 0 diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index 220bdba0da..f93237fce5 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -61,12 +61,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: return pyarrow.Table() @property - def full_feature_names(self) -> bool: + def full_feature_names(self) -> bool: # type: ignore """Returns True if full feature names should be applied to the results of the query.""" return False @property - def on_demand_feature_views(self) -> List[OnDemandFeatureView]: + def on_demand_feature_views(self) -> List[OnDemandFeatureView]: # type: ignore """Returns a list containing all the on demand feature views to be handled.""" return [] @@ -118,6 +118,7 @@ def retrieval_job(request, environment): database="feast", s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion", iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role", + workgroup="", ) environment.test_repo_config.offline_store = offline_store_config return RedshiftRetrievalJob( diff --git a/sdk/python/tests/unit/infra/offline_stores/test_redshift.py 
b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py index 049977489b..48ee99e89f 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_redshift.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py @@ -31,6 +31,7 @@ def test_offline_write_batch( user="user", iam_role="abcdef", s3_staging_location="s3://bucket/path", + workgroup="", ), ) diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 42229f8683..ca4ed6472b 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -45,8 +45,7 @@ def test_nullable_online_store_aws(): entity_key_serialization_version: 2 """ ), - expect_error="__root__ -> offline_store -> __root__\n" - " please specify either cluster_id & user if using provisioned clusters, or workgroup if using serverless (type=value_error)", + expect_error="4 validation errors for RepoConfig\nregion\n Field required", ) @@ -154,8 +153,7 @@ def test_extra_field(): path: "online_store.db" """ ), - expect_error="__root__ -> online_store -> that_field_should_not_be_here\n" - " extra fields not permitted (type=value_error.extra)", + expect_error="1 validation error for RepoConfig\nthat_field_should_not_be_here\n Extra inputs are not permitted", ) @@ -186,7 +184,7 @@ def test_bad_type(): path: 100500 """ ), - expect_error="__root__ -> online_store -> path\n str type expected", + expect_error="1 validation error for RepoConfig\npath\n Input should be a valid string", ) @@ -201,9 +199,7 @@ def test_no_project(): entity_key_serialization_version: 2 """ ), - expect_error="1 validation error for RepoConfig\n" - "project\n" - " field required (type=value_error.missing)", + expect_error="1 validation error for RepoConfig\nproject\n Field required", ) diff --git a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py index 
bacc8c1720..d8c769f12c 100644 --- a/sdk/python/tests/utils/e2e_test_validation.py +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -193,7 +193,7 @@ def make_feature_store_yaml( repo_path=str(Path(repo_dir_name)), entity_key_serialization_version=2, ) - config_dict = config.dict() + config_dict = config.model_dump(by_alias=True) if ( isinstance(config_dict["online_store"], dict) and "redis_type" in config_dict["online_store"] diff --git a/setup.py b/setup.py index ebc4df31a8..a73ef31b06 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ "protobuf<4.23.4,>3.20", "proto-plus>=1.20.0,<2", "pyarrow>=4", - "pydantic>=1,<2", + "pydantic>=2.0.0", "pygments>=2.12.0,<3", "PyYAML>=5.4.0,<7", "requests", @@ -126,7 +126,7 @@ "cassandra-driver>=3.24.0,<4", ] -GE_REQUIRED = ["great_expectations>=0.15.41,<0.16.0"] +GE_REQUIRED = ["great_expectations>=0.15.41"] AZURE_REQUIRED = [ "azure-storage-blob>=0.37.0", From dd96150e2a5829401f793a51da4b3594677e570d Mon Sep 17 00:00:00 2001 From: Tyler Rhodes <767526+trhodeos@users.noreply.github.com> Date: Sat, 17 Feb 2024 11:41:43 -0600 Subject: [PATCH 037/122] fix: Update typeguard version to >=4.0.0 (#3837) --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- sdk/python/tests/unit/test_feature_views.py | 3 ++- setup.py | 2 +- 8 files changed, 9 insertions(+), 8 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 34d0b0c284..f20bc05df9 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -895,7 +895,7 @@ traitlets==5.14.1 # nbformat trino==0.327.0 # via feast (setup.py) -typeguard==2.13.3 
+typeguard==4.1.5 # via feast (setup.py) types-protobuf==3.19.22 # via diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index ba474f6120..3943662d01 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -200,7 +200,7 @@ toolz==0.12.1 # partd tqdm==4.66.2 # via feast (setup.py) -typeguard==2.13.3 +typeguard==4.1.5 # via feast (setup.py) types-protobuf==4.24.0.20240129 # via mypy-protobuf diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index bf8f4fbc42..afa43ec2a2 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -919,7 +919,7 @@ traitlets==5.14.1 # nbformat trino==0.327.0 # via feast (setup.py) -typeguard==2.13.3 +typeguard==4.1.5 # via feast (setup.py) types-protobuf==3.19.22 # via diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 5e8481e770..079064a9ec 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -203,7 +203,7 @@ toolz==0.12.1 # partd tqdm==4.66.2 # via feast (setup.py) -typeguard==2.13.3 +typeguard==4.1.5 # via feast (setup.py) types-protobuf==4.24.0.20240129 # via mypy-protobuf diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 670ba1c07d..6c26f889e2 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -904,7 +904,7 @@ traitlets==5.14.1 # nbformat trino==0.327.0 # via feast (setup.py) -typeguard==2.13.3 +typeguard==4.1.5 # via feast (setup.py) types-protobuf==3.19.22 # via diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 2815ed0d78..182cb7ad07 100644 --- 
a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -198,7 +198,7 @@ toolz==0.12.1 # partd tqdm==4.66.2 # via feast (setup.py) -typeguard==2.13.3 +typeguard==4.1.5 # via feast (setup.py) types-protobuf==4.24.0.20240129 # via mypy-protobuf diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index afef332d37..20863645b7 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -1,6 +1,7 @@ from datetime import timedelta import pytest +from typeguard import TypeCheckError from feast.aggregation import Aggregation from feast.batch_feature_view import BatchFeatureView @@ -278,7 +279,7 @@ def test_hash(): def test_field_types(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): Field(name="name", dtype=ValueType.INT32) diff --git a/setup.py b/setup.py index a73ef31b06..c14d64557a 100644 --- a/setup.py +++ b/setup.py @@ -70,7 +70,7 @@ "tenacity>=7,<9", "toml>=0.10.0,<1", "tqdm>=4,<5", - "typeguard==2.13.3", + "typeguard>=4.0.0", "fastapi>=0.68.0", "uvicorn[standard]>=0.14.0,<1", "gunicorn", From dd79dbbac90caaf0617a5046c84a2618e532980b Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Sat, 17 Feb 2024 22:30:13 -0500 Subject: [PATCH 038/122] fix: Fix typo as the cli does not support shortcut-f option. (#3954) * fix: Fix typo as the cli does not support shortcut-f option. Signed-off-by: Shuchu Han * fix: add -f option as a shortcut of feature-store-yaml. 
Signed-off-by: Shuchu Han --------- Signed-off-by: Shuchu Han --- sdk/python/feast/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 985c44b821..7ce8aaef2b 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -76,6 +76,7 @@ def format_options(self, ctx: click.Context, formatter: click.HelpFormatter): ) @click.option( "--feature-store-yaml", + "-f", help="Override the directory where the CLI should look for the feature_store.yaml file.", ) @click.pass_context From 6b8e96c982a50587a13216666085fc61494cdfc9 Mon Sep 17 00:00:00 2001 From: Chester Date: Tue, 20 Feb 2024 09:38:52 +0800 Subject: [PATCH 039/122] fix: Revert mypy config (#3952) * fetch_arrow_all returns empty table Signed-off-by: Chester Ong * fix spark_kafka_processor typing errors Signed-off-by: Chester Ong * fix correct return type Signed-off-by: Chester Ong * revert _to_arrow_internal Signed-off-by: Chester Ong * revert kafkaStreamProcessor changes, change base type instead Signed-off-by: Chester Ong --------- Signed-off-by: Chester Ong --- Makefile | 2 +- .../feast/infra/contrib/stream_processor.py | 8 +++--- .../tests/data_source.py | 27 +++++++++++-------- .../feast/infra/offline_stores/snowflake.py | 2 +- 4 files changed, 23 insertions(+), 16 deletions(-) diff --git a/Makefile b/Makefile index 6736e64078..1598664f83 100644 --- a/Makefile +++ b/Makefile @@ -310,7 +310,7 @@ format-python: cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests lint-python: - cd ${ROOT_DIR}/sdk/python; python -m mypy --exclude=/tests/ --follow-imports=skip feast + cd ${ROOT_DIR}/sdk/python; python -m mypy feast cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/ cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index 
c4620f4ca1..3f1fe08510 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from types import MethodType -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Any, Optional from pyspark.sql import DataFrame from typing_extensions import TypeAlias @@ -51,7 +51,9 @@ def __init__( self.data_source = data_source @abstractmethod - def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: + def ingest_stream_feature_view( + self, to: PushMode = PushMode.ONLINE + ) -> Optional[Any]: """ Ingests data from the stream source attached to the stream feature view; transforms the data and then persists it to the online store and/or offline store, depending on the 'to' parameter. @@ -75,7 +77,7 @@ def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: raise NotImplementedError @abstractmethod - def _write_stream_data(self, table: StreamTable, to: PushMode) -> None: + def _write_stream_data(self, table: StreamTable, to: PushMode) -> Optional[Any]: """ Launches a job to persist stream data to the online store and/or offline store, depending on the 'to' parameter, and returns a handle for the job. 
diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index 46d5c20e97..f50cdc4c41 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -1,5 +1,5 @@ import logging -from typing import Dict, Optional +from typing import Dict, Literal, Optional import pandas as pd import pytest @@ -12,6 +12,7 @@ PostgreSQLSource, ) from feast.infra.utils.postgres.connection_utils import df_to_postgres_table +from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -26,6 +27,10 @@ POSTGRES_DB = "test" +class PostgreSQLOnlineStoreConfig(PostgreSQLConfig): + type: Literal["postgres"] = "postgres" + + @pytest.fixture(scope="session") def postgres_container(): container = ( @@ -106,17 +111,17 @@ def create_offline_store_config(self) -> PostgreSQLOfflineStoreConfig: def get_prefixed_table_name(self, suffix: str) -> str: return f"{self.project_name}_{suffix}" - def create_online_store(self) -> Dict[str, str]: + def create_online_store(self) -> PostgreSQLOnlineStoreConfig: assert self.container - return { - "type": "postgres", - "host": "localhost", - "port": self.container.get_exposed_port(5432), - "database": POSTGRES_DB, - "db_schema": "feature_store", - "user": POSTGRES_USER, - "password": POSTGRES_PASSWORD, - } + return PostgreSQLOnlineStoreConfig( + type="postgres", + host="localhost", + port=self.container.get_exposed_port(5432), + database=POSTGRES_DB, + db_schema="feature_store", + user=POSTGRES_USER, + password=POSTGRES_PASSWORD, + ) def create_saved_dataset_destination(self): # FIXME: ... 
diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 66e7e78651..32cda2d6b6 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -470,7 +470,7 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: pa_table = execute_snowflake_statement( self.snowflake_conn, self.to_sql() - ).fetch_arrow_all() + ).fetch_arrow_all(force_return_table=False) if pa_table: return pa_table From b83a70227c6afe7258328ff5847a26b526d0b5df Mon Sep 17 00:00:00 2001 From: Chester Date: Tue, 20 Feb 2024 09:42:46 +0800 Subject: [PATCH 040/122] fix: Using version args to install the correct feast version (#3953) * using version args to install the correct feast version Signed-off-by: Chester Ong * revert the COPY command Signed-off-by: Chester Ong --------- Signed-off-by: Chester Ong --- .../feast/infra/feature_servers/multicloud/Dockerfile | 7 +++++-- .../feast/infra/feature_servers/multicloud/Dockerfile.dev | 8 ++++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index fdd8e3ac51..4527c5b156 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -1,5 +1,9 @@ FROM python:3.8 +# Input the feast version to install +# This requires feast package to be available in pypi before building this image +ARG VERSION + RUN apt update && \ apt install -y \ jq \ @@ -7,8 +11,7 @@ RUN apt update && \ build-essential RUN pip install pip --upgrade -RUN pip install "feast[aws,gcp,snowflake,redis,go,mysql,postgres]" - +RUN pip install "feast[aws,gcp,snowflake,redis,go,mysql,postgres]==${VERSION}" RUN apt update RUN apt install -y -V ca-certificates 
lsb-release wget diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev index 3fc1355d7a..015e3c7ee8 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev @@ -1,5 +1,9 @@ FROM python:3.8 +# Input the feast version to install +# This requires feast package to be available in pypi before building this image +ARG VERSION + RUN apt update && \ apt install -y \ jq \ @@ -9,11 +13,11 @@ RUN apt update && \ RUN pip install pip --upgrade COPY . . -RUN pip install "feast[aws,gcp,snowflake,redis,go,mysql,postgres]" +RUN pip install "feast[aws,gcp,snowflake,redis,go,mysql,postgres]==${VERSION}" RUN apt update RUN apt install -y -V ca-certificates lsb-release wget RUN wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb RUN apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb RUN apt update -RUN apt -y install libarrow-dev \ No newline at end of file +RUN apt -y install libarrow-dev From 1cc94f2d23f88e0d9412b2fab8761abc81f5d35c Mon Sep 17 00:00:00 2001 From: John Lemmon <137814163+JohnLemmonMedely@users.noreply.github.com> Date: Wed, 21 Feb 2024 22:50:37 -0600 Subject: [PATCH 041/122] fix: Bugfix for grabbing historical data from Snowflake with array type features. (#3964) Bugfix for grabbing historical data from Snowflake with array type features that are null for an entity. 
Update docs to reflect array support in Snowflake Signed-off-by: john.lemmon --- docs/reference/data-sources/overview.md | 20 +++++++------- docs/reference/data-sources/snowflake.md | 2 +- .../feast/infra/offline_stores/snowflake.py | 4 ++- .../infra/offline_stores/test_snowflake.py | 26 +++++++++++++++++++ 4 files changed, 40 insertions(+), 12 deletions(-) diff --git a/docs/reference/data-sources/overview.md b/docs/reference/data-sources/overview.md index 112d4168d3..302c19b049 100644 --- a/docs/reference/data-sources/overview.md +++ b/docs/reference/data-sources/overview.md @@ -19,13 +19,13 @@ Details for each specific data source can be found [here](README.md). Below is a matrix indicating which data sources support which types. | | File | BigQuery | Snowflake | Redshift | Postgres | Spark | Trino | -| :-------------------------------- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | -| `bytes` | yes | yes | yes | yes | yes | yes | yes | -| `string` | yes | yes | yes | yes | yes | yes | yes | -| `int32` | yes | yes | yes | yes | yes | yes | yes | -| `int64` | yes | yes | yes | yes | yes | yes | yes | -| `float32` | yes | yes | yes | yes | yes | yes | yes | -| `float64` | yes | yes | yes | yes | yes | yes | yes | -| `bool` | yes | yes | yes | yes | yes | yes | yes | -| `timestamp` | yes | yes | yes | yes | yes | yes | yes | -| array types | yes | yes | no | no | yes | yes | no | \ No newline at end of file +| :-------------------------------- | :-- | :-- |:----------| :-- | :-- | :-- | :-- | +| `bytes` | yes | yes | yes | yes | yes | yes | yes | +| `string` | yes | yes | yes | yes | yes | yes | yes | +| `int32` | yes | yes | yes | yes | yes | yes | yes | +| `int64` | yes | yes | yes | yes | yes | yes | yes | +| `float32` | yes | yes | yes | yes | yes | yes | yes | +| `float64` | yes | yes | yes | yes | yes | yes | yes | +| `bool` | yes | yes | yes | yes | yes | yes | yes | +| `timestamp` | yes | yes | yes | yes | yes | yes | yes | +| array types | yes | yes | yes | no 
| yes | yes | no | \ No newline at end of file diff --git a/docs/reference/data-sources/snowflake.md b/docs/reference/data-sources/snowflake.md index 82bf5cb4d4..98a56e09f8 100644 --- a/docs/reference/data-sources/snowflake.md +++ b/docs/reference/data-sources/snowflake.md @@ -46,5 +46,5 @@ The full set of configuration options is available [here](https://rtd.feast.dev/ ## Supported Types -Snowflake data sources support all eight primitive types, but currently do not support array types. +Snowflake data sources support all eight primitive types. Array types are also supported but not with type inference. For a comparison against other batch data sources, please see [here](overview.md#functionality-matrix). diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 32cda2d6b6..14752fd857 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -463,7 +463,9 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Array(Float32), Array(Bool), ]: - df[feature.name] = [json.loads(x) for x in df[feature.name]] + df[feature.name] = [ + json.loads(x) if x else None for x in df[feature.name] + ] return df diff --git a/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py index afc3ae97ae..ac55f123bb 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py @@ -1,14 +1,18 @@ import re from unittest.mock import ANY, MagicMock, patch +import pandas as pd import pytest +from pytest_mock import MockFixture +from feast import FeatureView, Field, FileSource from feast.infra.offline_stores.snowflake import ( SnowflakeOfflineStoreConfig, SnowflakeRetrievalJob, ) from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig from feast.repo_config import RepoConfig +from 
feast.types import Array, String @pytest.fixture(params=["s3", "s3gov"]) @@ -55,3 +59,25 @@ def test_to_remote_storage(retrieval_job): mock_get_file_names_from_copy.assert_called_once_with(ANY, ANY) native_path = mock_get_file_names_from_copy.call_args[0][1] assert re.match("^s3://.*", native_path), "path should be s3://*" + + +def test_snowflake_to_df_internal( + retrieval_job: SnowflakeRetrievalJob, mocker: MockFixture +): + mock_execute = mocker.patch( + "feast.infra.offline_stores.snowflake.execute_snowflake_statement" + ) + mock_execute.return_value.fetch_pandas_all.return_value = pd.DataFrame.from_dict( + {"feature1": ['["1", "2", "3"]', None, "[]"]} # For Valid, Null, and Empty + ) + + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Array(String)), + ], + source=FileSource(path="dummy.path"), # Dummy value + ) + retrieval_job._feature_views = [feature_view] + retrieval_job._to_df_internal() From 59639dbb0272aacd2201cb5f65b01445013db6e6 Mon Sep 17 00:00:00 2001 From: lokeshrangineni Date: Fri, 23 Feb 2024 22:48:08 -0500 Subject: [PATCH 042/122] feat: Updating dependencies so that feast can be run on 3.11. 
(#3958) --- .../requirements/py3.11-ci-requirements.txt | 1000 +++++++++++++++++ .../requirements/py3.11-requirements.txt | 234 ++++ setup.py | 4 +- 3 files changed, 1236 insertions(+), 2 deletions(-) create mode 100644 sdk/python/requirements/py3.11-ci-requirements.txt create mode 100644 sdk/python/requirements/py3.11-requirements.txt diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt new file mode 100644 index 0000000000..bc9ddbe8e7 --- /dev/null +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -0,0 +1,1000 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt +# +alabaster==0.7.16 + # via sphinx +altair==4.2.2 + # via great-expectations +annotated-types==0.6.0 + # via pydantic +anyio==4.2.0 + # via + # httpx + # jupyter-server + # starlette + # watchfiles +appdirs==1.4.4 + # via fissix +argon2-cffi==23.1.0 + # via jupyter-server +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +arrow==1.3.0 + # via isoduration +asn1crypto==1.5.1 + # via snowflake-connector-python +assertpy==1.1 + # via feast (setup.py) +asttokens==2.4.1 + # via stack-data +async-lru==2.0.4 + # via jupyterlab +async-timeout==4.0.3 + # via redis +attrs==23.2.0 + # via + # bowler + # jsonschema + # referencing +avro==1.10.0 + # via feast (setup.py) +azure-core==1.30.0 + # via + # azure-identity + # azure-storage-blob +azure-identity==1.15.0 + # via feast (setup.py) +azure-storage-blob==12.19.0 + # via feast (setup.py) +babel==2.14.0 + # via + # jupyterlab-server + # sphinx +beautifulsoup4==4.12.3 + # via nbconvert +black==22.12.0 + # via feast (setup.py) +bleach==6.1.0 + # via nbconvert +boto3==1.34.42 + # via + # feast (setup.py) + # moto +botocore==1.34.42 + # via + # boto3 + # moto + # s3transfer +bowler==0.9.0 + # via feast (setup.py) +build==1.0.3 + # via + # feast (setup.py) + # 
pip-tools +bytewax==0.18.2 + # via feast (setup.py) +cachecontrol==0.14.0 + # via firebase-admin +cachetools==5.3.2 + # via google-auth +cassandra-driver==3.29.0 + # via feast (setup.py) +certifi==2024.2.2 + # via + # httpcore + # httpx + # kubernetes + # minio + # requests + # snowflake-connector-python +cffi==1.16.0 + # via + # argon2-cffi-bindings + # cryptography + # snowflake-connector-python +cfgv==3.4.0 + # via pre-commit +charset-normalizer==3.3.2 + # via + # requests + # snowflake-connector-python +click==8.1.7 + # via + # black + # bowler + # dask + # feast (setup.py) + # geomet + # great-expectations + # moreorless + # pip-tools + # uvicorn +cloudpickle==3.0.0 + # via dask +colorama==0.4.6 + # via + # feast (setup.py) + # great-expectations +comm==0.2.1 + # via + # ipykernel + # ipywidgets +coverage[toml]==7.4.1 + # via pytest-cov +cryptography==41.0.7 + # via + # azure-identity + # azure-storage-blob + # feast (setup.py) + # great-expectations + # moto + # msal + # pyjwt + # pyopenssl + # snowflake-connector-python + # types-pyopenssl + # types-redis +dask==2024.2.0 + # via feast (setup.py) +db-dtypes==1.2.0 + # via google-cloud-bigquery +debugpy==1.8.1 + # via ipykernel +decorator==5.1.1 + # via ipython +defusedxml==0.7.1 + # via nbconvert +deprecation==2.1.0 + # via testcontainers +dill==0.3.8 + # via + # bytewax + # feast (setup.py) + # multiprocess +distlib==0.3.8 + # via virtualenv +docker==7.0.0 + # via + # feast (setup.py) + # testcontainers +docutils==0.19 + # via sphinx +entrypoints==0.4 + # via altair +exceptiongroup==1.2.0 + # via + # anyio + # ipython + # pytest +execnet==2.0.2 + # via pytest-xdist +executing==2.0.1 + # via stack-data +fastapi==0.109.2 + # via feast (setup.py) +fastavro==1.9.4 + # via + # feast (setup.py) + # pandavro +fastjsonschema==2.19.1 + # via nbformat +filelock==3.13.1 + # via + # snowflake-connector-python + # virtualenv +firebase-admin==5.4.0 + # via feast (setup.py) +fissix==21.11.13 + # via bowler +flake8==6.0.0 + 
# via feast (setup.py) +fqdn==1.5.1 + # via jsonschema +fsspec==2023.12.2 + # via + # dask + # feast (setup.py) +geojson==2.5.0 + # via rockset +geomet==0.2.1.post1 + # via cassandra-driver +google-api-core[grpc]==2.17.1 + # via + # feast (setup.py) + # firebase-admin + # google-api-python-client + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-core + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-api-python-client==2.118.0 + # via firebase-admin +google-auth==2.27.0 + # via + # google-api-core + # google-api-python-client + # google-auth-httplib2 + # google-cloud-core + # google-cloud-storage + # kubernetes +google-auth-httplib2==0.2.0 + # via google-api-python-client +google-cloud-bigquery[pandas]==3.12.0 + # via + # feast (setup.py) + # google-cloud-bigquery +google-cloud-bigquery-storage==2.24.0 + # via feast (setup.py) +google-cloud-bigquery-storage==2.24.0 + # via feast (setup.py) +google-cloud-bigtable==2.23.0 + # via feast (setup.py) +google-cloud-core==2.4.1 + # via + # google-cloud-bigquery + # google-cloud-bigtable + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-cloud-datastore==2.19.0 + # via feast (setup.py) +google-cloud-firestore==2.14.0 + # via firebase-admin +google-cloud-storage==2.14.0 + # via + # feast (setup.py) + # firebase-admin +google-crc32c==1.5.0 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.0 + # via + # google-cloud-bigquery + # google-cloud-storage +googleapis-common-protos[grpc]==1.62.0 + # via + # feast (setup.py) + # google-api-core + # grpc-google-iam-v1 + # grpcio-status +great-expectations==0.18.8 + # via feast (setup.py) +greenlet==3.0.3 + # via sqlalchemy +grpc-google-iam-v1==0.13.0 + # via google-cloud-bigtable +grpcio==1.60.1 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # googleapis-common-protos + # grpc-google-iam-v1 + # 
grpcio-health-checking + # grpcio-reflection + # grpcio-status + # grpcio-testing + # grpcio-tools +grpcio-health-checking==1.60.1 + # via feast (setup.py) +grpcio-reflection==1.60.1 + # via feast (setup.py) +grpcio-status==1.60.1 + # via google-api-core +grpcio-testing==1.60.1 + # via feast (setup.py) +grpcio-tools==1.60.1 + # via feast (setup.py) +gunicorn==21.2.0 + # via feast (setup.py) +h11==0.14.0 + # via + # httpcore + # uvicorn +happybase==1.2.0 + # via feast (setup.py) +hazelcast-python-client==5.3.0 + # via feast (setup.py) +hiredis==2.3.2 + # via feast (setup.py) +httpcore==1.0.3 + # via httpx +httplib2==0.22.0 + # via + # google-api-python-client + # google-auth-httplib2 +httptools==0.6.1 + # via uvicorn +httpx==0.26.0 + # via + # feast (setup.py) + # jupyterlab +identify==2.5.34 + # via pre-commit +idna==3.6 + # via + # anyio + # httpx + # jsonschema + # requests + # snowflake-connector-python +imagesize==1.4.1 + # via sphinx +importlib-metadata==6.11.0 + # via + # dask + # feast (setup.py) +importlib-resources==6.1.1 + # via feast (setup.py) +iniconfig==2.0.0 + # via pytest +ipykernel==6.29.2 + # via jupyterlab +ipython==8.21.0 + # via + # great-expectations + # ipykernel + # ipywidgets +ipywidgets==8.1.2 + # via great-expectations +isodate==0.6.1 + # via azure-storage-blob +isoduration==20.11.0 + # via jsonschema +isort==5.13.2 + # via feast (setup.py) +jedi==0.19.1 + # via ipython +jinja2==3.1.3 + # via + # altair + # feast (setup.py) + # great-expectations + # jupyter-server + # jupyterlab + # jupyterlab-server + # moto + # nbconvert + # sphinx +jmespath==1.0.1 + # via + # boto3 + # botocore +json5==0.9.14 + # via jupyterlab-server +jsonpatch==1.33 + # via great-expectations +jsonpointer==2.4 + # via + # jsonpatch + # jsonschema +jsonschema[format-nongpl]==4.21.1 + # via + # altair + # feast (setup.py) + # great-expectations + # jupyter-events + # jupyterlab-server + # nbformat +jsonschema-specifications==2023.12.1 + # via jsonschema 
+jupyter-client==8.6.0 + # via + # ipykernel + # jupyter-server + # nbclient +jupyter-core==5.7.1 + # via + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # nbclient + # nbconvert + # nbformat +jupyter-events==0.9.0 + # via jupyter-server +jupyter-lsp==2.2.2 + # via jupyterlab +jupyter-server==2.12.5 + # via + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # notebook + # notebook-shim +jupyter-server-terminals==0.5.2 + # via jupyter-server +jupyterlab==4.1.1 + # via notebook +jupyterlab-pygments==0.3.0 + # via nbconvert +jupyterlab-server==2.25.3 + # via + # jupyterlab + # notebook +jupyterlab-widgets==3.0.10 + # via ipywidgets +kubernetes==20.13.0 + # via feast (setup.py) +locket==1.0.0 + # via partd +makefun==1.15.2 + # via great-expectations +markupsafe==2.1.5 + # via + # jinja2 + # nbconvert + # werkzeug +marshmallow==3.20.2 + # via great-expectations +matplotlib-inline==0.1.6 + # via + # ipykernel + # ipython +mccabe==0.7.0 + # via flake8 +minio==7.1.0 + # via feast (setup.py) +mistune==3.0.2 + # via + # great-expectations + # nbconvert +mmh3==4.1.0 + # via feast (setup.py) +mock==2.0.0 + # via feast (setup.py) +moreorless==0.4.0 + # via bowler +moto==4.2.14 + # via feast (setup.py) +msal==1.26.0 + # via + # azure-identity + # msal-extensions +msal-extensions==1.1.0 + # via azure-identity +msgpack==1.0.7 + # via cachecontrol +multiprocess==0.70.16 + # via bytewax +mypy==1.8.0 + # via + # feast (setup.py) + # sqlalchemy +mypy-extensions==1.0.0 + # via + # black + # mypy +mypy-protobuf==3.1.0 + # via feast (setup.py) +nbclient==0.9.0 + # via nbconvert +nbconvert==7.16.0 + # via jupyter-server +nbformat==5.9.2 + # via + # great-expectations + # jupyter-server + # nbclient + # nbconvert +nest-asyncio==1.6.0 + # via ipykernel +nodeenv==1.8.0 + # via pre-commit +notebook==7.1.0 + # via great-expectations +notebook-shim==0.2.3 + # via + # jupyterlab + # notebook +numpy==1.24.4 + # via + # altair + # db-dtypes + # feast (setup.py) + # 
great-expectations + # pandas + # pandavro + # pyarrow + # scipy +oauthlib==3.2.2 + # via requests-oauthlib +overrides==7.7.0 + # via jupyter-server +packaging==23.2 + # via + # build + # dask + # db-dtypes + # deprecation + # docker + # google-cloud-bigquery + # great-expectations + # gunicorn + # ipykernel + # jupyter-server + # jupyterlab + # jupyterlab-server + # marshmallow + # msal-extensions + # nbconvert + # pytest + # snowflake-connector-python + # sphinx +pandas==1.5.3 + # via + # altair + # db-dtypes + # feast (setup.py) + # google-cloud-bigquery + # great-expectations + # pandavro + # snowflake-connector-python +pandavro==1.5.2 + # via feast (setup.py) +pandocfilters==1.5.1 + # via nbconvert +parso==0.8.3 + # via jedi +partd==1.4.1 + # via dask +pathspec==0.12.1 + # via black +pbr==6.0.0 + # via mock +pexpect==4.9.0 + # via ipython +pip-tools==7.3.0 + # via feast (setup.py) +platformdirs==3.11.0 + # via + # black + # jupyter-core + # snowflake-connector-python + # virtualenv +pluggy==1.4.0 + # via pytest +ply==3.11 + # via thriftpy2 +portalocker==2.8.2 + # via msal-extensions +pre-commit==3.3.1 + # via feast (setup.py) +prometheus-client==0.20.0 + # via jupyter-server +prompt-toolkit==3.0.43 + # via ipython +proto-plus==1.23.0 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-datastore + # google-cloud-firestore +protobuf==4.23.3 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-datastore + # google-cloud-firestore + # googleapis-common-protos + # grpc-google-iam-v1 + # grpcio-health-checking + # grpcio-reflection + # grpcio-status + # grpcio-testing + # grpcio-tools + # mypy-protobuf + # proto-plus +psutil==5.9.0 + # via + # feast (setup.py) + # ipykernel +psycopg2-binary==2.9.9 + # via feast (setup.py) +ptyprocess==0.7.0 + # via + # pexpect + # terminado 
+pure-eval==0.2.2 + # via stack-data +py==1.11.0 + # via feast (setup.py) +py-cpuinfo==9.0.0 + # via pytest-benchmark +py4j==0.10.9.7 + # via pyspark +pyarrow==15.0.0 + # via + # db-dtypes + # feast (setup.py) + # google-cloud-bigquery + # snowflake-connector-python +pyasn1==0.5.1 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 + # via google-auth +pybindgen==0.22.1 + # via feast (setup.py) +pycodestyle==2.10.0 + # via flake8 +pycparser==2.21 + # via cffi +pydantic==2.6.1 + # via + # fastapi + # feast (setup.py) + # great-expectations +pydantic-core==2.16.2 + # via pydantic +pyflakes==3.0.1 + # via flake8 +pygments==2.17.2 + # via + # feast (setup.py) + # ipython + # nbconvert + # sphinx +pyjwt[crypto]==2.8.0 + # via + # msal + # snowflake-connector-python +pymssql==2.2.11 + # via feast (setup.py) +pymysql==1.1.0 + # via feast (setup.py) +pyodbc==5.1.0 + # via feast (setup.py) +pyopenssl==23.3.0 + # via snowflake-connector-python +pyparsing==3.1.1 + # via + # great-expectations + # httplib2 +pyproject-hooks==1.0.0 + # via build +pyspark==3.5.0 + # via feast (setup.py) +pytest==7.4.4 + # via + # feast (setup.py) + # pytest-benchmark + # pytest-cov + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # pytest-timeout + # pytest-xdist +pytest-benchmark==3.4.1 + # via feast (setup.py) +pytest-cov==4.1.0 + # via feast (setup.py) +pytest-lazy-fixture==0.6.3 + # via feast (setup.py) +pytest-mock==1.10.4 + # via feast (setup.py) +pytest-ordering==0.6 + # via feast (setup.py) +pytest-timeout==1.4.2 + # via feast (setup.py) +pytest-xdist==3.5.0 + # via feast (setup.py) +python-dateutil==2.8.2 + # via + # arrow + # botocore + # google-cloud-bigquery + # great-expectations + # jupyter-client + # kubernetes + # moto + # pandas + # rockset + # trino +python-dotenv==1.0.1 + # via uvicorn +python-json-logger==2.0.7 + # via jupyter-events +pytz==2024.1 + # via + # great-expectations + # pandas + # snowflake-connector-python + # trino +pyyaml==6.0.1 + # via + # 
dask + # feast (setup.py) + # jupyter-events + # kubernetes + # pre-commit + # responses + # uvicorn +pyzmq==25.1.2 + # via + # ipykernel + # jupyter-client + # jupyter-server +redis==4.6.0 + # via feast (setup.py) +referencing==0.33.0 + # via + # jsonschema + # jsonschema-specifications + # jupyter-events +regex==2023.12.25 + # via feast (setup.py) +requests==2.31.0 + # via + # azure-core + # cachecontrol + # docker + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # jupyterlab-server + # kubernetes + # moto + # msal + # requests-oauthlib + # responses + # snowflake-connector-python + # sphinx + # trino +requests-oauthlib==1.3.1 + # via kubernetes +responses==0.25.0 + # via moto +rfc3339-validator==0.1.4 + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 + # via + # jsonschema + # jupyter-events +rockset==2.1.0 + # via feast (setup.py) +rpds-py==0.18.0 + # via + # jsonschema + # referencing +rsa==4.9 + # via google-auth +ruamel-yaml==0.17.17 + # via great-expectations +s3transfer==0.10.0 + # via boto3 +scipy==1.12.0 + # via great-expectations +send2trash==1.8.2 + # via jupyter-server +six==1.16.0 + # via + # asttokens + # azure-core + # bleach + # geomet + # happybase + # isodate + # kubernetes + # mock + # pandavro + # python-dateutil + # rfc3339-validator + # thriftpy2 +sniffio==1.3.0 + # via + # anyio + # httpx +snowballstemmer==2.2.0 + # via sphinx +snowflake-connector-python[pandas]==3.7.0 + # via feast (setup.py) +sortedcontainers==2.4.0 + # via snowflake-connector-python +soupsieve==2.5 + # via beautifulsoup4 +sphinx==6.2.1 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.8 + # via sphinx +sphinxcontrib-devhelp==1.0.6 + # via sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.7 + # via sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via sphinx +sqlalchemy[mypy]==1.4.51 + # via feast (setup.py) 
+sqlalchemy2-stubs==0.0.2a38 + # via sqlalchemy +stack-data==0.6.3 + # via ipython +starlette==0.36.3 + # via fastapi +tabulate==0.9.0 + # via feast (setup.py) +tenacity==8.2.3 + # via feast (setup.py) +terminado==0.18.0 + # via + # jupyter-server + # jupyter-server-terminals +testcontainers==3.7.1 + # via feast (setup.py) +thriftpy2==0.4.17 + # via happybase +tinycss2==1.2.1 + # via nbconvert +toml==0.10.2 + # via feast (setup.py) +tomli==2.0.1 + # via + # black + # build + # coverage + # jupyterlab + # mypy + # pip-tools + # pyproject-hooks + # pytest +tomlkit==0.12.3 + # via snowflake-connector-python +toolz==0.12.1 + # via + # altair + # dask + # partd +tornado==6.4 + # via + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # notebook + # terminado +tqdm==4.66.2 + # via + # feast (setup.py) + # great-expectations +traitlets==5.14.1 + # via + # comm + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-core + # jupyter-events + # jupyter-server + # jupyterlab + # matplotlib-inline + # nbclient + # nbconvert + # nbformat +trino==0.327.0 + # via feast (setup.py) +typeguard==4.1.5 + # via feast (setup.py) +types-protobuf==3.19.22 + # via + # feast (setup.py) + # mypy-protobuf +types-pymysql==1.1.0.1 + # via feast (setup.py) +types-pyopenssl==24.0.0.20240130 + # via types-redis +types-python-dateutil==2.8.19.20240106 + # via + # arrow + # feast (setup.py) +types-pytz==2024.1.0.20240203 + # via feast (setup.py) +types-pyyaml==6.0.12.12 + # via feast (setup.py) +types-redis==4.6.0.20240106 + # via feast (setup.py) +types-requests==2.30.0.0 + # via feast (setup.py) +types-setuptools==69.0.0.20240125 + # via feast (setup.py) +types-tabulate==0.9.0.20240106 + # via feast (setup.py) +types-urllib3==1.26.25.14 + # via types-requests +typing-extensions==4.9.0 + # via + # anyio + # async-lru + # azure-core + # azure-storage-blob + # fastapi + # great-expectations + # mypy + # pydantic + # pydantic-core + # snowflake-connector-python + # 
sqlalchemy2-stubs + # uvicorn +tzlocal==5.2 + # via + # great-expectations + # trino +uri-template==1.3.0 + # via jsonschema +uritemplate==4.1.1 + # via google-api-python-client +urllib3==1.26.18 + # via + # botocore + # docker + # feast (setup.py) + # great-expectations + # kubernetes + # minio + # requests + # responses + # rockset +uvicorn[standard]==0.27.1 + # via feast (setup.py) +uvloop==0.19.0 + # via uvicorn +virtualenv==20.23.0 + # via + # feast (setup.py) + # pre-commit +volatile==2.1.0 + # via bowler +watchfiles==0.21.0 + # via uvicorn +wcwidth==0.2.13 + # via prompt-toolkit +webcolors==1.13 + # via jsonschema +webencodings==0.5.1 + # via + # bleach + # tinycss2 +websocket-client==1.7.0 + # via + # jupyter-server + # kubernetes +websockets==12.0 + # via uvicorn +werkzeug==3.0.1 + # via moto +wheel==0.42.0 + # via pip-tools +widgetsnbextension==4.0.10 + # via ipywidgets +wrapt==1.16.0 + # via testcontainers +xmltodict==0.13.0 + # via moto +zipp==3.17.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt new file mode 100644 index 0000000000..3943662d01 --- /dev/null +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -0,0 +1,234 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt +# +annotated-types==0.6.0 + # via pydantic +anyio==4.2.0 + # via + # httpx + # starlette + # watchfiles +appdirs==1.4.4 + # via fissix +attrs==23.2.0 + # via + # bowler + # jsonschema + # referencing +bowler==0.9.0 + # via feast (setup.py) +certifi==2024.2.2 + # via + # httpcore + # httpx + # requests +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # bowler + # dask + # feast (setup.py) + # moreorless + # uvicorn +cloudpickle==3.0.0 + # via dask 
+colorama==0.4.6 + # via feast (setup.py) +dask==2024.2.0 + # via feast (setup.py) +dill==0.3.8 + # via feast (setup.py) +exceptiongroup==1.2.0 + # via anyio +fastapi==0.109.2 + # via feast (setup.py) +fastavro==1.9.4 + # via + # feast (setup.py) + # pandavro +fissix==21.11.13 + # via bowler +fsspec==2024.2.0 + # via dask +greenlet==3.0.3 + # via sqlalchemy +grpcio==1.60.1 + # via + # feast (setup.py) + # grpcio-health-checking + # grpcio-reflection + # grpcio-tools +grpcio-health-checking==1.60.1 + # via feast (setup.py) +grpcio-reflection==1.60.1 + # via feast (setup.py) +grpcio-tools==1.60.1 + # via feast (setup.py) +gunicorn==21.2.0 + # via feast (setup.py) +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.3 + # via httpx +httptools==0.6.1 + # via uvicorn +httpx==0.26.0 + # via feast (setup.py) +idna==3.6 + # via + # anyio + # httpx + # requests +importlib-metadata==6.11.0 + # via + # dask + # feast (setup.py) +importlib-resources==6.1.1 + # via feast (setup.py) +jinja2==3.1.3 + # via feast (setup.py) +jsonschema==4.21.1 + # via feast (setup.py) +jsonschema-specifications==2023.12.1 + # via jsonschema +locket==1.0.0 + # via partd +markupsafe==2.1.5 + # via jinja2 +mmh3==4.1.0 + # via feast (setup.py) +moreorless==0.4.0 + # via bowler +mypy==1.8.0 + # via sqlalchemy +mypy-extensions==1.0.0 + # via mypy +mypy-protobuf==3.1.0 + # via feast (setup.py) +numpy==1.24.4 + # via + # feast (setup.py) + # pandas + # pandavro + # pyarrow +packaging==23.2 + # via + # dask + # gunicorn +pandas==1.5.3 + # via + # feast (setup.py) + # pandavro +pandavro==1.5.2 + # via feast (setup.py) +partd==1.4.1 + # via dask +proto-plus==1.23.0 + # via feast (setup.py) +protobuf==4.23.3 + # via + # feast (setup.py) + # grpcio-health-checking + # grpcio-reflection + # grpcio-tools + # mypy-protobuf + # proto-plus +pyarrow==15.0.0 + # via feast (setup.py) +pydantic==2.6.1 + # via + # fastapi + # feast (setup.py) +pydantic-core==2.16.2 + # via pydantic +pygments==2.17.2 + # via 
feast (setup.py) +python-dateutil==2.8.2 + # via pandas +python-dotenv==1.0.1 + # via uvicorn +pytz==2024.1 + # via pandas +pyyaml==6.0.1 + # via + # dask + # feast (setup.py) + # uvicorn +referencing==0.33.0 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 + # via feast (setup.py) +rpds-py==0.18.0 + # via + # jsonschema + # referencing +six==1.16.0 + # via + # pandavro + # python-dateutil +sniffio==1.3.0 + # via + # anyio + # httpx +sqlalchemy[mypy]==1.4.51 + # via + # feast (setup.py) + # sqlalchemy +sqlalchemy2-stubs==0.0.2a38 + # via sqlalchemy +starlette==0.36.3 + # via fastapi +tabulate==0.9.0 + # via feast (setup.py) +tenacity==8.2.3 + # via feast (setup.py) +toml==0.10.2 + # via feast (setup.py) +tomli==2.0.1 + # via mypy +toolz==0.12.1 + # via + # dask + # partd +tqdm==4.66.2 + # via feast (setup.py) +typeguard==4.1.5 + # via feast (setup.py) +types-protobuf==4.24.0.20240129 + # via mypy-protobuf +typing-extensions==4.9.0 + # via + # anyio + # fastapi + # mypy + # pydantic + # pydantic-core + # sqlalchemy2-stubs + # uvicorn +urllib3==2.2.0 + # via requests +uvicorn[standard]==0.27.1 + # via + # feast (setup.py) + # uvicorn +uvloop==0.19.0 + # via uvicorn +volatile==2.1.0 + # via bowler +watchfiles==0.21.0 + # via uvicorn +websockets==12.0 + # via uvicorn +zipp==3.17.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/setup.py b/setup.py index c14d64557a..818b9d66b1 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. 
"pandavro~=1.5.0", # Higher than 4.23.4 seems to cause a seg fault - "protobuf<4.23.4,>3.20", + "protobuf<=4.23.4,>3.20", "proto-plus>=1.20.0,<2", "pyarrow>=4", "pydantic>=2.0.0", @@ -100,7 +100,7 @@ AWS_REQUIRED = ["boto3>=1.17.0,<2", "docker>=5.0.2", "fsspec<=2024.1.0"] -BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] +BYTEWAX_REQUIRED = ["bytewax==0.18.2", "docker>=5.0.2", "kubernetes<=20.13.0"] SNOWFLAKE_REQUIRED = [ "snowflake-connector-python[pandas]>=3,<4", From 0a9fae8fd42e7348365ef902038f3f71f977ef3e Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 24 Feb 2024 12:22:46 +0400 Subject: [PATCH 043/122] feat: Decouple transformation types from ODFVs (#3949) * decouple transformation from odfvs Signed-off-by: tokoko * OnDemandFeatureView: keep udf and udf_string parameters for backwards compatibility Signed-off-by: tokoko * fix linting issues Signed-off-by: tokoko * remove unused import in registry protos Signed-off-by: tokoko --------- Signed-off-by: tokoko --- protos/feast/core/OnDemandFeatureView.proto | 4 +- protos/feast/registry/RegistryServer.proto | 1 - .../feast/infra/registry/base_registry.py | 2 +- sdk/python/feast/on_demand_feature_view.py | 81 +++++++++++-------- .../feast/on_demand_pandas_transformation.py | 56 +++++++++++++ .../feature_repos/universal/feature_views.py | 11 ++- .../tests/unit/test_on_demand_feature_view.py | 36 +++++++-- 7 files changed, 145 insertions(+), 46 deletions(-) create mode 100644 sdk/python/feast/on_demand_pandas_transformation.py diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index 50bf8b6f55..741d46e39e 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -48,7 +48,9 @@ message OnDemandFeatureViewSpec { // Map of sources for this feature view. 
map sources = 4; - UserDefinedFunction user_defined_function = 5; + oneof transformation { + UserDefinedFunction user_defined_function = 5; + } // Description of the on demand feature view. string description = 6; diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto index 3e7773e89a..ab324f9bd1 100644 --- a/protos/feast/registry/RegistryServer.proto +++ b/protos/feast/registry/RegistryServer.proto @@ -2,7 +2,6 @@ syntax = "proto3"; package feast.registry; -import "google/protobuf/timestamp.proto"; import "google/protobuf/empty.proto"; import "feast/core/Registry.proto"; import "feast/core/Entity.proto"; diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index f23a820d23..d0ab74812e 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -665,7 +665,7 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: odfv_dict["spec"]["userDefinedFunction"][ "body" - ] = on_demand_feature_view.udf_string + ] = on_demand_feature_view.transformation.udf_string registry_dict["onDemandFeatureViews"].append(odfv_dict) for request_feature_view in sorted( self.list_request_feature_views(project=project), diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index fcafeaa2bc..706f2ec4e4 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -16,6 +16,7 @@ from feast.feature_view import FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type +from feast.on_demand_pandas_transformation import OnDemandPandasTransformation from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) @@ -24,9 +25,6 @@ OnDemandFeatureViewSpec, OnDemandSource, ) -from feast.protos.feast.core.OnDemandFeatureView_pb2 
import ( - UserDefinedFunction as UserDefinedFunctionProto, -) from feast.type_map import ( feast_value_type_to_pandas_type, python_type_to_feast_value_type, @@ -51,8 +49,7 @@ class OnDemandFeatureView(BaseFeatureView): sources with type FeatureViewProjection. source_request_sources: A map from input source names to the actual input sources with type RequestSource. - udf: The user defined transformation function, which must take pandas dataframes - as inputs. + transformation: The user defined transformation. description: A human-readable description. tags: A dictionary of key-value pairs to store arbitrary metadata. owner: The owner of the on demand feature view, typically the email of the primary @@ -63,8 +60,7 @@ class OnDemandFeatureView(BaseFeatureView): features: List[Field] source_feature_view_projections: Dict[str, FeatureViewProjection] source_request_sources: Dict[str, RequestSource] - udf: FunctionType - udf_string: str + transformation: Union[OnDemandPandasTransformation] description: str tags: Dict[str, str] owner: str @@ -82,8 +78,9 @@ def __init__( # noqa: C901 FeatureViewProjection, ] ], - udf: FunctionType, + udf: Optional[FunctionType] = None, udf_string: str = "", + transformation: Optional[Union[OnDemandPandasTransformation]] = None, description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", @@ -98,9 +95,10 @@ def __init__( # noqa: C901 sources: A map from input source names to the actual input sources, which may be feature views, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. - udf: The user defined transformation function, which must take pandas + udf (deprecated): The user defined transformation function, which must take pandas dataframes as inputs. 
- udf_string: The source code version of the udf (for diffing and displaying in Web UI) + udf_string (deprecated): The source code version of the udf (for diffing and displaying in Web UI) + transformation: The user defined transformation. description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the on demand feature view, typically the email @@ -114,6 +112,18 @@ def __init__( # noqa: C901 owner=owner, ) + if not transformation: + if udf: + warnings.warn( + "udf and udf_string parameters are deprecated. Please use transformation=OnDemandPandasTransformation(udf, udf_string) instead.", + DeprecationWarning, + ) + transformation = OnDemandPandasTransformation(udf, udf_string) + else: + raise Exception( + "OnDemandFeatureView needs to be initialized with either transformation or udf arguments" + ) + self.source_feature_view_projections: Dict[str, FeatureViewProjection] = {} self.source_request_sources: Dict[str, RequestSource] = {} for odfv_source in sources: @@ -126,8 +136,7 @@ def __init__( # noqa: C901 odfv_source.name ] = odfv_source.projection - self.udf = udf # type: ignore - self.udf_string = udf_string + self.transformation = transformation @property def proto_class(self) -> Type[OnDemandFeatureViewProto]: @@ -139,8 +148,7 @@ def __copy__(self): schema=self.features, sources=list(self.source_feature_view_projections.values()) + list(self.source_request_sources.values()), - udf=self.udf, - udf_string=self.udf_string, + transformation=self.transformation, description=self.description, tags=self.tags, owner=self.owner, @@ -161,8 +169,7 @@ def __eq__(self, other): self.source_feature_view_projections != other.source_feature_view_projections or self.source_request_sources != other.source_request_sources - or self.udf_string != other.udf_string - or self.udf.__code__.co_code != other.udf.__code__.co_code + or self.transformation != other.transformation ): return 
False @@ -200,11 +207,9 @@ def to_proto(self) -> OnDemandFeatureViewProto: name=self.name, features=[feature.to_proto() for feature in self.features], sources=sources, - user_defined_function=UserDefinedFunctionProto( - name=self.udf.__name__, - body=dill.dumps(self.udf, recurse=True), - body_text=self.udf_string, - ), + user_defined_function=self.transformation.to_proto() + if type(self.transformation) == OnDemandPandasTransformation + else None, description=self.description, tags=self.tags, owner=self.owner, @@ -243,6 +248,16 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): RequestSource.from_proto(on_demand_source.request_data_source) ) + if ( + on_demand_feature_view_proto.spec.WhichOneof("transformation") + == "user_defined_function" + ): + transformation = OnDemandPandasTransformation.from_proto( + on_demand_feature_view_proto.spec.user_defined_function + ) + else: + raise Exception("At least one transformation type needs to be provided") + on_demand_feature_view_obj = cls( name=on_demand_feature_view_proto.spec.name, schema=[ @@ -253,10 +268,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): for feature in on_demand_feature_view_proto.spec.features ], sources=sources, - udf=dill.loads( - on_demand_feature_view_proto.spec.user_defined_function.body - ), - udf_string=on_demand_feature_view_proto.spec.user_defined_function.body_text, + transformation=transformation, description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, @@ -315,7 +327,8 @@ def get_transformed_features_df( columns_to_cleanup.append(full_feature_ref) # Compute transformed values and apply to each result row - df_with_transformed_features = self.udf.__call__(df_with_features) + + df_with_transformed_features = self.transformation.transform(df_with_features) # Work out whether the correct columns names are used. 
rename_columns: Dict[str, str] = {} @@ -335,7 +348,7 @@ def get_transformed_features_df( df_with_features.drop(columns=columns_to_cleanup, inplace=True) return df_with_transformed_features.rename(columns=rename_columns) - def infer_features(self): + def infer_features(self) -> None: """ Infers the set of features associated to this feature view from the input source. @@ -365,7 +378,7 @@ def infer_features(self): dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) sample_val = rand_df_value[dtype] if dtype in rand_df_value else None df[f"{field.name}"] = pd.Series(sample_val, dtype=dtype) - output_df: pd.DataFrame = self.udf.__call__(df) + output_df: pd.DataFrame = self.transformation.transform(df) inferred_features = [] for f, dt in zip(output_df.columns, output_df.dtypes): inferred_features.append( @@ -396,7 +409,9 @@ def infer_features(self): ) @staticmethod - def get_requested_odfvs(feature_refs, project, registry): + def get_requested_odfvs( + feature_refs, project, registry + ) -> List["OnDemandFeatureView"]: all_on_demand_feature_views = registry.list_on_demand_feature_views( project, allow_cache=True ) @@ -438,7 +453,7 @@ def on_demand_feature_view( of the primary maintainer. """ - def mainify(obj): + def mainify(obj) -> None: # Needed to allow dill to properly serialize the udf. Otherwise, clients will need to have a file with the same # name as the original file defining the ODFV. 
if obj.__module__ != "__main__": @@ -447,15 +462,17 @@ def mainify(obj): def decorator(user_function): udf_string = dill.source.getsource(user_function) mainify(user_function) + + transformation = OnDemandPandasTransformation(user_function, udf_string) + on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, sources=sources, schema=schema, - udf=user_function, + transformation=transformation, description=description, tags=tags, owner=owner, - udf_string=udf_string, ) functools.update_wrapper( wrapper=on_demand_feature_view_obj, wrapped=user_function diff --git a/sdk/python/feast/on_demand_pandas_transformation.py b/sdk/python/feast/on_demand_pandas_transformation.py new file mode 100644 index 0000000000..52d45893c5 --- /dev/null +++ b/sdk/python/feast/on_demand_pandas_transformation.py @@ -0,0 +1,56 @@ +from types import FunctionType + +import dill +import pandas as pd + +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + UserDefinedFunction as UserDefinedFunctionProto, +) + + +class OnDemandPandasTransformation: + def __init__(self, udf: FunctionType, udf_string: str = ""): + """ + Creates an OnDemandPandasTransformation object. + + Args: + udf: The user defined transformation function, which must take pandas + dataframes as inputs. + udf_string: The source code version of the udf (for diffing and displaying in Web UI) + """ + self.udf = udf + self.udf_string = udf_string + + def transform(self, df: pd.DataFrame) -> pd.DataFrame: + return self.udf.__call__(df) + + def __eq__(self, other): + if not isinstance(other, OnDemandPandasTransformation): + raise TypeError( + "Comparisons should only involve OnDemandPandasTransformation class objects." 
+ ) + + if not super().__eq__(other): + return False + + if ( + self.udf_string != other.udf_string + or self.udf.__code__.co_code != other.udf.__code__.co_code + ): + return False + + return True + + def to_proto(self) -> UserDefinedFunctionProto: + return UserDefinedFunctionProto( + name=self.udf.__name__, + body=dill.dumps(self.udf, recurse=True), + body_text=self.udf_string, + ) + + @classmethod + def from_proto(cls, user_defined_function_proto: UserDefinedFunctionProto): + return OnDemandPandasTransformation( + udf=dill.loads(user_defined_function_proto.body), + udf_string=user_defined_function_proto.body_text, + ) diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 9bb8aae77f..421ef41601 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -15,6 +15,7 @@ ) from feast.data_source import DataSource, RequestSource from feast.feature_view_projection import FeatureViewProjection +from feast.on_demand_feature_view import OnDemandPandasTransformation from feast.types import Array, FeastType, Float32, Float64, Int32, Int64 from tests.integration.feature_repos.universal.entities import ( customer, @@ -70,8 +71,9 @@ def conv_rate_plus_100_feature_view( name=conv_rate_plus_100.__name__, schema=[] if infer_features else _features, sources=sources, - udf=conv_rate_plus_100, - udf_string="raw udf source", + transformation=OnDemandPandasTransformation( + udf=conv_rate_plus_100, udf_string="raw udf source" + ), ) @@ -108,8 +110,9 @@ def similarity_feature_view( name=similarity.__name__, sources=sources, schema=[] if infer_features else _fields, - udf=similarity, - udf_string="similarity raw udf", + transformation=OnDemandPandasTransformation( + udf=similarity, udf_string="similarity raw udf" + ), ) diff --git 
a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index ca8e7b25cb..721026ea46 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -17,7 +17,10 @@ from feast.feature_view import FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource -from feast.on_demand_feature_view import OnDemandFeatureView +from feast.on_demand_feature_view import ( + OnDemandFeatureView, + OnDemandPandasTransformation, +) from feast.types import Float32 @@ -54,8 +57,9 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - udf=udf1, - udf_string="udf1 source code", + transformation=OnDemandPandasTransformation( + udf=udf1, udf_string="udf1 source code" + ), ) on_demand_feature_view_2 = OnDemandFeatureView( name="my-on-demand-feature-view", @@ -64,8 +68,9 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - udf=udf1, - udf_string="udf1 source code", + transformation=OnDemandPandasTransformation( + udf=udf1, udf_string="udf1 source code" + ), ) on_demand_feature_view_3 = OnDemandFeatureView( name="my-on-demand-feature-view", @@ -74,10 +79,23 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - udf=udf2, - udf_string="udf2 source code", + transformation=OnDemandPandasTransformation( + udf=udf2, udf_string="udf2 source code" + ), ) on_demand_feature_view_4 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + transformation=OnDemandPandasTransformation( + udf=udf2, udf_string="udf2 source code" + ), + description="test", + ) + on_demand_feature_view_5 = OnDemandFeatureView( name="my-on-demand-feature-view", sources=sources, schema=[ @@ -105,3 +123,7 @@ def 
test_hash(): on_demand_feature_view_4, } assert len(s4) == 3 + + assert on_demand_feature_view_5.transformation == OnDemandPandasTransformation( + udf2, "udf2 source code" + ) From d3c68fb8646b29032cb67b8c8e6a8c0aa7a821c7 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Sat, 24 Feb 2024 15:16:09 -0500 Subject: [PATCH 044/122] feat: Revert updating dependencies so that feast can be run on 3.11. (#3968) Revert "feat: Updating dependencies so that feast can be run on 3.11. (#3958)" This reverts commit 59639dbb0272aacd2201cb5f65b01445013db6e6. Signed-off-by: franciscojavierarceo --- .../requirements/py3.11-ci-requirements.txt | 1000 ----------------- .../requirements/py3.11-requirements.txt | 234 ---- setup.py | 4 +- 3 files changed, 2 insertions(+), 1236 deletions(-) delete mode 100644 sdk/python/requirements/py3.11-ci-requirements.txt delete mode 100644 sdk/python/requirements/py3.11-requirements.txt diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt deleted file mode 100644 index bc9ddbe8e7..0000000000 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ /dev/null @@ -1,1000 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt -# -alabaster==0.7.16 - # via sphinx -altair==4.2.2 - # via great-expectations -annotated-types==0.6.0 - # via pydantic -anyio==4.2.0 - # via - # httpx - # jupyter-server - # starlette - # watchfiles -appdirs==1.4.4 - # via fissix -argon2-cffi==23.1.0 - # via jupyter-server -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -arrow==1.3.0 - # via isoduration -asn1crypto==1.5.1 - # via snowflake-connector-python -assertpy==1.1 - # via feast (setup.py) -asttokens==2.4.1 - # via stack-data -async-lru==2.0.4 - # via jupyterlab -async-timeout==4.0.3 - # via redis -attrs==23.2.0 - # via - # bowler - # jsonschema - # referencing 
-avro==1.10.0 - # via feast (setup.py) -azure-core==1.30.0 - # via - # azure-identity - # azure-storage-blob -azure-identity==1.15.0 - # via feast (setup.py) -azure-storage-blob==12.19.0 - # via feast (setup.py) -babel==2.14.0 - # via - # jupyterlab-server - # sphinx -beautifulsoup4==4.12.3 - # via nbconvert -black==22.12.0 - # via feast (setup.py) -bleach==6.1.0 - # via nbconvert -boto3==1.34.42 - # via - # feast (setup.py) - # moto -botocore==1.34.42 - # via - # boto3 - # moto - # s3transfer -bowler==0.9.0 - # via feast (setup.py) -build==1.0.3 - # via - # feast (setup.py) - # pip-tools -bytewax==0.18.2 - # via feast (setup.py) -cachecontrol==0.14.0 - # via firebase-admin -cachetools==5.3.2 - # via google-auth -cassandra-driver==3.29.0 - # via feast (setup.py) -certifi==2024.2.2 - # via - # httpcore - # httpx - # kubernetes - # minio - # requests - # snowflake-connector-python -cffi==1.16.0 - # via - # argon2-cffi-bindings - # cryptography - # snowflake-connector-python -cfgv==3.4.0 - # via pre-commit -charset-normalizer==3.3.2 - # via - # requests - # snowflake-connector-python -click==8.1.7 - # via - # black - # bowler - # dask - # feast (setup.py) - # geomet - # great-expectations - # moreorless - # pip-tools - # uvicorn -cloudpickle==3.0.0 - # via dask -colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations -comm==0.2.1 - # via - # ipykernel - # ipywidgets -coverage[toml]==7.4.1 - # via pytest-cov -cryptography==41.0.7 - # via - # azure-identity - # azure-storage-blob - # feast (setup.py) - # great-expectations - # moto - # msal - # pyjwt - # pyopenssl - # snowflake-connector-python - # types-pyopenssl - # types-redis -dask==2024.2.0 - # via feast (setup.py) -db-dtypes==1.2.0 - # via google-cloud-bigquery -debugpy==1.8.1 - # via ipykernel -decorator==5.1.1 - # via ipython -defusedxml==0.7.1 - # via nbconvert -deprecation==2.1.0 - # via testcontainers -dill==0.3.8 - # via - # bytewax - # feast (setup.py) - # multiprocess -distlib==0.3.8 - # via 
virtualenv -docker==7.0.0 - # via - # feast (setup.py) - # testcontainers -docutils==0.19 - # via sphinx -entrypoints==0.4 - # via altair -exceptiongroup==1.2.0 - # via - # anyio - # ipython - # pytest -execnet==2.0.2 - # via pytest-xdist -executing==2.0.1 - # via stack-data -fastapi==0.109.2 - # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro -fastjsonschema==2.19.1 - # via nbformat -filelock==3.13.1 - # via - # snowflake-connector-python - # virtualenv -firebase-admin==5.4.0 - # via feast (setup.py) -fissix==21.11.13 - # via bowler -flake8==6.0.0 - # via feast (setup.py) -fqdn==1.5.1 - # via jsonschema -fsspec==2023.12.2 - # via - # dask - # feast (setup.py) -geojson==2.5.0 - # via rockset -geomet==0.2.1.post1 - # via cassandra-driver -google-api-core[grpc]==2.17.1 - # via - # feast (setup.py) - # firebase-admin - # google-api-python-client - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-core - # google-cloud-datastore - # google-cloud-firestore - # google-cloud-storage -google-api-python-client==2.118.0 - # via firebase-admin -google-auth==2.27.0 - # via - # google-api-core - # google-api-python-client - # google-auth-httplib2 - # google-cloud-core - # google-cloud-storage - # kubernetes -google-auth-httplib2==0.2.0 - # via google-api-python-client -google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery -google-cloud-bigquery-storage==2.24.0 - # via feast (setup.py) -google-cloud-bigquery-storage==2.24.0 - # via feast (setup.py) -google-cloud-bigtable==2.23.0 - # via feast (setup.py) -google-cloud-core==2.4.1 - # via - # google-cloud-bigquery - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-firestore - # google-cloud-storage -google-cloud-datastore==2.19.0 - # via feast (setup.py) -google-cloud-firestore==2.14.0 - # via firebase-admin -google-cloud-storage==2.14.0 - # via - # feast (setup.py) - # firebase-admin 
-google-crc32c==1.5.0 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.0 - # via - # google-cloud-bigquery - # google-cloud-storage -googleapis-common-protos[grpc]==1.62.0 - # via - # feast (setup.py) - # google-api-core - # grpc-google-iam-v1 - # grpcio-status -great-expectations==0.18.8 - # via feast (setup.py) -greenlet==3.0.3 - # via sqlalchemy -grpc-google-iam-v1==0.13.0 - # via google-cloud-bigtable -grpcio==1.60.1 - # via - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # googleapis-common-protos - # grpc-google-iam-v1 - # grpcio-health-checking - # grpcio-reflection - # grpcio-status - # grpcio-testing - # grpcio-tools -grpcio-health-checking==1.60.1 - # via feast (setup.py) -grpcio-reflection==1.60.1 - # via feast (setup.py) -grpcio-status==1.60.1 - # via google-api-core -grpcio-testing==1.60.1 - # via feast (setup.py) -grpcio-tools==1.60.1 - # via feast (setup.py) -gunicorn==21.2.0 - # via feast (setup.py) -h11==0.14.0 - # via - # httpcore - # uvicorn -happybase==1.2.0 - # via feast (setup.py) -hazelcast-python-client==5.3.0 - # via feast (setup.py) -hiredis==2.3.2 - # via feast (setup.py) -httpcore==1.0.3 - # via httpx -httplib2==0.22.0 - # via - # google-api-python-client - # google-auth-httplib2 -httptools==0.6.1 - # via uvicorn -httpx==0.26.0 - # via - # feast (setup.py) - # jupyterlab -identify==2.5.34 - # via pre-commit -idna==3.6 - # via - # anyio - # httpx - # jsonschema - # requests - # snowflake-connector-python -imagesize==1.4.1 - # via sphinx -importlib-metadata==6.11.0 - # via - # dask - # feast (setup.py) -importlib-resources==6.1.1 - # via feast (setup.py) -iniconfig==2.0.0 - # via pytest -ipykernel==6.29.2 - # via jupyterlab -ipython==8.21.0 - # via - # great-expectations - # ipykernel - # ipywidgets -ipywidgets==8.1.2 - # via great-expectations -isodate==0.6.1 - # via azure-storage-blob -isoduration==20.11.0 - # via jsonschema -isort==5.13.2 - # via feast (setup.py) -jedi==0.19.1 
- # via ipython -jinja2==3.1.3 - # via - # altair - # feast (setup.py) - # great-expectations - # jupyter-server - # jupyterlab - # jupyterlab-server - # moto - # nbconvert - # sphinx -jmespath==1.0.1 - # via - # boto3 - # botocore -json5==0.9.14 - # via jupyterlab-server -jsonpatch==1.33 - # via great-expectations -jsonpointer==2.4 - # via - # jsonpatch - # jsonschema -jsonschema[format-nongpl]==4.21.1 - # via - # altair - # feast (setup.py) - # great-expectations - # jupyter-events - # jupyterlab-server - # nbformat -jsonschema-specifications==2023.12.1 - # via jsonschema -jupyter-client==8.6.0 - # via - # ipykernel - # jupyter-server - # nbclient -jupyter-core==5.7.1 - # via - # ipykernel - # jupyter-client - # jupyter-server - # jupyterlab - # nbclient - # nbconvert - # nbformat -jupyter-events==0.9.0 - # via jupyter-server -jupyter-lsp==2.2.2 - # via jupyterlab -jupyter-server==2.12.5 - # via - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # notebook - # notebook-shim -jupyter-server-terminals==0.5.2 - # via jupyter-server -jupyterlab==4.1.1 - # via notebook -jupyterlab-pygments==0.3.0 - # via nbconvert -jupyterlab-server==2.25.3 - # via - # jupyterlab - # notebook -jupyterlab-widgets==3.0.10 - # via ipywidgets -kubernetes==20.13.0 - # via feast (setup.py) -locket==1.0.0 - # via partd -makefun==1.15.2 - # via great-expectations -markupsafe==2.1.5 - # via - # jinja2 - # nbconvert - # werkzeug -marshmallow==3.20.2 - # via great-expectations -matplotlib-inline==0.1.6 - # via - # ipykernel - # ipython -mccabe==0.7.0 - # via flake8 -minio==7.1.0 - # via feast (setup.py) -mistune==3.0.2 - # via - # great-expectations - # nbconvert -mmh3==4.1.0 - # via feast (setup.py) -mock==2.0.0 - # via feast (setup.py) -moreorless==0.4.0 - # via bowler -moto==4.2.14 - # via feast (setup.py) -msal==1.26.0 - # via - # azure-identity - # msal-extensions -msal-extensions==1.1.0 - # via azure-identity -msgpack==1.0.7 - # via cachecontrol -multiprocess==0.70.16 - # via bytewax 
-mypy==1.8.0 - # via - # feast (setup.py) - # sqlalchemy -mypy-extensions==1.0.0 - # via - # black - # mypy -mypy-protobuf==3.1.0 - # via feast (setup.py) -nbclient==0.9.0 - # via nbconvert -nbconvert==7.16.0 - # via jupyter-server -nbformat==5.9.2 - # via - # great-expectations - # jupyter-server - # nbclient - # nbconvert -nest-asyncio==1.6.0 - # via ipykernel -nodeenv==1.8.0 - # via pre-commit -notebook==7.1.0 - # via great-expectations -notebook-shim==0.2.3 - # via - # jupyterlab - # notebook -numpy==1.24.4 - # via - # altair - # db-dtypes - # feast (setup.py) - # great-expectations - # pandas - # pandavro - # pyarrow - # scipy -oauthlib==3.2.2 - # via requests-oauthlib -overrides==7.7.0 - # via jupyter-server -packaging==23.2 - # via - # build - # dask - # db-dtypes - # deprecation - # docker - # google-cloud-bigquery - # great-expectations - # gunicorn - # ipykernel - # jupyter-server - # jupyterlab - # jupyterlab-server - # marshmallow - # msal-extensions - # nbconvert - # pytest - # snowflake-connector-python - # sphinx -pandas==1.5.3 - # via - # altair - # db-dtypes - # feast (setup.py) - # google-cloud-bigquery - # great-expectations - # pandavro - # snowflake-connector-python -pandavro==1.5.2 - # via feast (setup.py) -pandocfilters==1.5.1 - # via nbconvert -parso==0.8.3 - # via jedi -partd==1.4.1 - # via dask -pathspec==0.12.1 - # via black -pbr==6.0.0 - # via mock -pexpect==4.9.0 - # via ipython -pip-tools==7.3.0 - # via feast (setup.py) -platformdirs==3.11.0 - # via - # black - # jupyter-core - # snowflake-connector-python - # virtualenv -pluggy==1.4.0 - # via pytest -ply==3.11 - # via thriftpy2 -portalocker==2.8.2 - # via msal-extensions -pre-commit==3.3.1 - # via feast (setup.py) -prometheus-client==0.20.0 - # via jupyter-server -prompt-toolkit==3.0.43 - # via ipython -proto-plus==1.23.0 - # via - # feast (setup.py) - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-datastore - # 
google-cloud-firestore -protobuf==4.23.3 - # via - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-firestore - # googleapis-common-protos - # grpc-google-iam-v1 - # grpcio-health-checking - # grpcio-reflection - # grpcio-status - # grpcio-testing - # grpcio-tools - # mypy-protobuf - # proto-plus -psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg2-binary==2.9.9 - # via feast (setup.py) -ptyprocess==0.7.0 - # via - # pexpect - # terminado -pure-eval==0.2.2 - # via stack-data -py==1.11.0 - # via feast (setup.py) -py-cpuinfo==9.0.0 - # via pytest-benchmark -py4j==0.10.9.7 - # via pyspark -pyarrow==15.0.0 - # via - # db-dtypes - # feast (setup.py) - # google-cloud-bigquery - # snowflake-connector-python -pyasn1==0.5.1 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.3.0 - # via google-auth -pybindgen==0.22.1 - # via feast (setup.py) -pycodestyle==2.10.0 - # via flake8 -pycparser==2.21 - # via cffi -pydantic==2.6.1 - # via - # fastapi - # feast (setup.py) - # great-expectations -pydantic-core==2.16.2 - # via pydantic -pyflakes==3.0.1 - # via flake8 -pygments==2.17.2 - # via - # feast (setup.py) - # ipython - # nbconvert - # sphinx -pyjwt[crypto]==2.8.0 - # via - # msal - # snowflake-connector-python -pymssql==2.2.11 - # via feast (setup.py) -pymysql==1.1.0 - # via feast (setup.py) -pyodbc==5.1.0 - # via feast (setup.py) -pyopenssl==23.3.0 - # via snowflake-connector-python -pyparsing==3.1.1 - # via - # great-expectations - # httplib2 -pyproject-hooks==1.0.0 - # via build -pyspark==3.5.0 - # via feast (setup.py) -pytest==7.4.4 - # via - # feast (setup.py) - # pytest-benchmark - # pytest-cov - # pytest-lazy-fixture - # pytest-mock - # pytest-ordering - # pytest-timeout - # pytest-xdist -pytest-benchmark==3.4.1 - # via feast (setup.py) -pytest-cov==4.1.0 - # via feast (setup.py) -pytest-lazy-fixture==0.6.3 - # via feast (setup.py) 
-pytest-mock==1.10.4 - # via feast (setup.py) -pytest-ordering==0.6 - # via feast (setup.py) -pytest-timeout==1.4.2 - # via feast (setup.py) -pytest-xdist==3.5.0 - # via feast (setup.py) -python-dateutil==2.8.2 - # via - # arrow - # botocore - # google-cloud-bigquery - # great-expectations - # jupyter-client - # kubernetes - # moto - # pandas - # rockset - # trino -python-dotenv==1.0.1 - # via uvicorn -python-json-logger==2.0.7 - # via jupyter-events -pytz==2024.1 - # via - # great-expectations - # pandas - # snowflake-connector-python - # trino -pyyaml==6.0.1 - # via - # dask - # feast (setup.py) - # jupyter-events - # kubernetes - # pre-commit - # responses - # uvicorn -pyzmq==25.1.2 - # via - # ipykernel - # jupyter-client - # jupyter-server -redis==4.6.0 - # via feast (setup.py) -referencing==0.33.0 - # via - # jsonschema - # jsonschema-specifications - # jupyter-events -regex==2023.12.25 - # via feast (setup.py) -requests==2.31.0 - # via - # azure-core - # cachecontrol - # docker - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # google-cloud-storage - # great-expectations - # jupyterlab-server - # kubernetes - # moto - # msal - # requests-oauthlib - # responses - # snowflake-connector-python - # sphinx - # trino -requests-oauthlib==1.3.1 - # via kubernetes -responses==0.25.0 - # via moto -rfc3339-validator==0.1.4 - # via - # jsonschema - # jupyter-events -rfc3986-validator==0.1.1 - # via - # jsonschema - # jupyter-events -rockset==2.1.0 - # via feast (setup.py) -rpds-py==0.18.0 - # via - # jsonschema - # referencing -rsa==4.9 - # via google-auth -ruamel-yaml==0.17.17 - # via great-expectations -s3transfer==0.10.0 - # via boto3 -scipy==1.12.0 - # via great-expectations -send2trash==1.8.2 - # via jupyter-server -six==1.16.0 - # via - # asttokens - # azure-core - # bleach - # geomet - # happybase - # isodate - # kubernetes - # mock - # pandavro - # python-dateutil - # rfc3339-validator - # thriftpy2 -sniffio==1.3.0 - # via - # anyio - # httpx 
-snowballstemmer==2.2.0 - # via sphinx -snowflake-connector-python[pandas]==3.7.0 - # via feast (setup.py) -sortedcontainers==2.4.0 - # via snowflake-connector-python -soupsieve==2.5 - # via beautifulsoup4 -sphinx==6.2.1 - # via feast (setup.py) -sphinxcontrib-applehelp==1.0.8 - # via sphinx -sphinxcontrib-devhelp==1.0.6 - # via sphinx -sphinxcontrib-htmlhelp==2.0.5 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.7 - # via sphinx -sphinxcontrib-serializinghtml==1.1.10 - # via sphinx -sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a38 - # via sqlalchemy -stack-data==0.6.3 - # via ipython -starlette==0.36.3 - # via fastapi -tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.2.3 - # via feast (setup.py) -terminado==0.18.0 - # via - # jupyter-server - # jupyter-server-terminals -testcontainers==3.7.1 - # via feast (setup.py) -thriftpy2==0.4.17 - # via happybase -tinycss2==1.2.1 - # via nbconvert -toml==0.10.2 - # via feast (setup.py) -tomli==2.0.1 - # via - # black - # build - # coverage - # jupyterlab - # mypy - # pip-tools - # pyproject-hooks - # pytest -tomlkit==0.12.3 - # via snowflake-connector-python -toolz==0.12.1 - # via - # altair - # dask - # partd -tornado==6.4 - # via - # ipykernel - # jupyter-client - # jupyter-server - # jupyterlab - # notebook - # terminado -tqdm==4.66.2 - # via - # feast (setup.py) - # great-expectations -traitlets==5.14.1 - # via - # comm - # ipykernel - # ipython - # ipywidgets - # jupyter-client - # jupyter-core - # jupyter-events - # jupyter-server - # jupyterlab - # matplotlib-inline - # nbclient - # nbconvert - # nbformat -trino==0.327.0 - # via feast (setup.py) -typeguard==4.1.5 - # via feast (setup.py) -types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf -types-pymysql==1.1.0.1 - # via feast (setup.py) -types-pyopenssl==24.0.0.20240130 - # via types-redis -types-python-dateutil==2.8.19.20240106 - # via - # arrow - # feast (setup.py) 
-types-pytz==2024.1.0.20240203 - # via feast (setup.py) -types-pyyaml==6.0.12.12 - # via feast (setup.py) -types-redis==4.6.0.20240106 - # via feast (setup.py) -types-requests==2.30.0.0 - # via feast (setup.py) -types-setuptools==69.0.0.20240125 - # via feast (setup.py) -types-tabulate==0.9.0.20240106 - # via feast (setup.py) -types-urllib3==1.26.25.14 - # via types-requests -typing-extensions==4.9.0 - # via - # anyio - # async-lru - # azure-core - # azure-storage-blob - # fastapi - # great-expectations - # mypy - # pydantic - # pydantic-core - # snowflake-connector-python - # sqlalchemy2-stubs - # uvicorn -tzlocal==5.2 - # via - # great-expectations - # trino -uri-template==1.3.0 - # via jsonschema -uritemplate==4.1.1 - # via google-api-python-client -urllib3==1.26.18 - # via - # botocore - # docker - # feast (setup.py) - # great-expectations - # kubernetes - # minio - # requests - # responses - # rockset -uvicorn[standard]==0.27.1 - # via feast (setup.py) -uvloop==0.19.0 - # via uvicorn -virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit -volatile==2.1.0 - # via bowler -watchfiles==0.21.0 - # via uvicorn -wcwidth==0.2.13 - # via prompt-toolkit -webcolors==1.13 - # via jsonschema -webencodings==0.5.1 - # via - # bleach - # tinycss2 -websocket-client==1.7.0 - # via - # jupyter-server - # kubernetes -websockets==12.0 - # via uvicorn -werkzeug==3.0.1 - # via moto -wheel==0.42.0 - # via pip-tools -widgetsnbextension==4.0.10 - # via ipywidgets -wrapt==1.16.0 - # via testcontainers -xmltodict==0.13.0 - # via moto -zipp==3.17.0 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt deleted file mode 100644 index 3943662d01..0000000000 --- a/sdk/python/requirements/py3.11-requirements.txt +++ /dev/null @@ -1,234 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 
3.10 -# by the following command: -# -# pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt -# -annotated-types==0.6.0 - # via pydantic -anyio==4.2.0 - # via - # httpx - # starlette - # watchfiles -appdirs==1.4.4 - # via fissix -attrs==23.2.0 - # via - # bowler - # jsonschema - # referencing -bowler==0.9.0 - # via feast (setup.py) -certifi==2024.2.2 - # via - # httpcore - # httpx - # requests -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # bowler - # dask - # feast (setup.py) - # moreorless - # uvicorn -cloudpickle==3.0.0 - # via dask -colorama==0.4.6 - # via feast (setup.py) -dask==2024.2.0 - # via feast (setup.py) -dill==0.3.8 - # via feast (setup.py) -exceptiongroup==1.2.0 - # via anyio -fastapi==0.109.2 - # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro -fissix==21.11.13 - # via bowler -fsspec==2024.2.0 - # via dask -greenlet==3.0.3 - # via sqlalchemy -grpcio==1.60.1 - # via - # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools -grpcio-health-checking==1.60.1 - # via feast (setup.py) -grpcio-reflection==1.60.1 - # via feast (setup.py) -grpcio-tools==1.60.1 - # via feast (setup.py) -gunicorn==21.2.0 - # via feast (setup.py) -h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.3 - # via httpx -httptools==0.6.1 - # via uvicorn -httpx==0.26.0 - # via feast (setup.py) -idna==3.6 - # via - # anyio - # httpx - # requests -importlib-metadata==6.11.0 - # via - # dask - # feast (setup.py) -importlib-resources==6.1.1 - # via feast (setup.py) -jinja2==3.1.3 - # via feast (setup.py) -jsonschema==4.21.1 - # via feast (setup.py) -jsonschema-specifications==2023.12.1 - # via jsonschema -locket==1.0.0 - # via partd -markupsafe==2.1.5 - # via jinja2 -mmh3==4.1.0 - # via feast (setup.py) -moreorless==0.4.0 - # via bowler -mypy==1.8.0 - # via sqlalchemy -mypy-extensions==1.0.0 - # via mypy -mypy-protobuf==3.1.0 - # via feast (setup.py) -numpy==1.24.4 - # via - # 
feast (setup.py) - # pandas - # pandavro - # pyarrow -packaging==23.2 - # via - # dask - # gunicorn -pandas==1.5.3 - # via - # feast (setup.py) - # pandavro -pandavro==1.5.2 - # via feast (setup.py) -partd==1.4.1 - # via dask -proto-plus==1.23.0 - # via feast (setup.py) -protobuf==4.23.3 - # via - # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools - # mypy-protobuf - # proto-plus -pyarrow==15.0.0 - # via feast (setup.py) -pydantic==2.6.1 - # via - # fastapi - # feast (setup.py) -pydantic-core==2.16.2 - # via pydantic -pygments==2.17.2 - # via feast (setup.py) -python-dateutil==2.8.2 - # via pandas -python-dotenv==1.0.1 - # via uvicorn -pytz==2024.1 - # via pandas -pyyaml==6.0.1 - # via - # dask - # feast (setup.py) - # uvicorn -referencing==0.33.0 - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 - # via feast (setup.py) -rpds-py==0.18.0 - # via - # jsonschema - # referencing -six==1.16.0 - # via - # pandavro - # python-dateutil -sniffio==1.3.0 - # via - # anyio - # httpx -sqlalchemy[mypy]==1.4.51 - # via - # feast (setup.py) - # sqlalchemy -sqlalchemy2-stubs==0.0.2a38 - # via sqlalchemy -starlette==0.36.3 - # via fastapi -tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.2.3 - # via feast (setup.py) -toml==0.10.2 - # via feast (setup.py) -tomli==2.0.1 - # via mypy -toolz==0.12.1 - # via - # dask - # partd -tqdm==4.66.2 - # via feast (setup.py) -typeguard==4.1.5 - # via feast (setup.py) -types-protobuf==4.24.0.20240129 - # via mypy-protobuf -typing-extensions==4.9.0 - # via - # anyio - # fastapi - # mypy - # pydantic - # pydantic-core - # sqlalchemy2-stubs - # uvicorn -urllib3==2.2.0 - # via requests -uvicorn[standard]==0.27.1 - # via - # feast (setup.py) - # uvicorn -uvloop==0.19.0 - # via uvicorn -volatile==2.1.0 - # via bowler -watchfiles==0.21.0 - # via uvicorn -websockets==12.0 - # via uvicorn -zipp==3.17.0 - # via importlib-metadata - -# The following packages are considered to be unsafe in a 
requirements file: -# setuptools diff --git a/setup.py b/setup.py index 818b9d66b1..c14d64557a 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. "pandavro~=1.5.0", # Higher than 4.23.4 seems to cause a seg fault - "protobuf<=4.23.4,>3.20", + "protobuf<4.23.4,>3.20", "proto-plus>=1.20.0,<2", "pyarrow>=4", "pydantic>=2.0.0", @@ -100,7 +100,7 @@ AWS_REQUIRED = ["boto3>=1.17.0,<2", "docker>=5.0.2", "fsspec<=2024.1.0"] -BYTEWAX_REQUIRED = ["bytewax==0.18.2", "docker>=5.0.2", "kubernetes<=20.13.0"] +BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] SNOWFLAKE_REQUIRED = [ "snowflake-connector-python[pandas]>=3,<4", From 9e58bd463f7ca2b4982708cb1e1250f587ecfb68 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sun, 25 Feb 2024 02:10:02 +0400 Subject: [PATCH 045/122] feat: Add Substrait-based ODFV transformation (#3969) --- protos/feast/core/OnDemandFeatureView.proto | 5 + sdk/python/feast/on_demand_feature_view.py | 55 +++++++- .../on_demand_substrait_transformation.py | 50 ++++++++ .../requirements/py3.10-ci-requirements.txt | 118 ++++++++++++------ .../requirements/py3.10-requirements.txt | 29 +++-- .../requirements/py3.8-ci-requirements.txt | 112 +++++++++++------ .../requirements/py3.8-requirements.txt | 33 ++--- .../requirements/py3.9-ci-requirements.txt | 117 +++++++++++------ .../requirements/py3.9-requirements.txt | 30 +++-- ...test_on_demand_substrait_transformation.py | 112 +++++++++++++++++ setup.py | 7 ++ 11 files changed, 504 insertions(+), 164 deletions(-) create mode 100644 sdk/python/feast/on_demand_substrait_transformation.py create mode 100644 sdk/python/tests/unit/test_on_demand_substrait_transformation.py diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index 741d46e39e..c43b33c1d2 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ 
b/protos/feast/core/OnDemandFeatureView.proto @@ -50,6 +50,7 @@ message OnDemandFeatureViewSpec { oneof transformation { UserDefinedFunction user_defined_function = 5; + OnDemandSubstraitTransformation on_demand_substrait_transformation = 9; } // Description of the on demand feature view. @@ -89,3 +90,7 @@ message UserDefinedFunction { // The string representation of the udf string body_text = 3; } + +message OnDemandSubstraitTransformation { + bytes substrait_plan = 1; +} \ No newline at end of file diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 706f2ec4e4..586286a3d4 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -1,5 +1,6 @@ import copy import functools +import inspect import warnings from datetime import datetime from types import FunctionType @@ -17,6 +18,7 @@ from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type from feast.on_demand_pandas_transformation import OnDemandPandasTransformation +from feast.on_demand_substrait_transformation import OnDemandSubstraitTransformation from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) @@ -210,6 +212,9 @@ def to_proto(self) -> OnDemandFeatureViewProto: user_defined_function=self.transformation.to_proto() if type(self.transformation) == OnDemandPandasTransformation else None, + on_demand_substrait_transformation=self.transformation.to_proto() # type: ignore + if type(self.transformation) == OnDemandSubstraitTransformation + else None, description=self.description, tags=self.tags, owner=self.owner, @@ -255,6 +260,13 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): transformation = OnDemandPandasTransformation.from_proto( on_demand_feature_view_proto.spec.user_defined_function ) + elif ( + on_demand_feature_view_proto.spec.WhichOneof("transformation") + == 
"on_demand_substrait_transformation" + ): + transformation = OnDemandSubstraitTransformation.from_proto( + on_demand_feature_view_proto.spec.on_demand_substrait_transformation + ) else: raise Exception("At least one transformation type needs to be provided") @@ -460,10 +472,47 @@ def mainify(obj) -> None: obj.__module__ = "__main__" def decorator(user_function): - udf_string = dill.source.getsource(user_function) - mainify(user_function) + return_annotation = inspect.signature(user_function).return_annotation + if ( + return_annotation + and return_annotation.__module__ == "ibis.expr.types.relations" + and return_annotation.__name__ == "Table" + ): + import ibis + import ibis.expr.datatypes as dt + from ibis_substrait.compiler.core import SubstraitCompiler + + compiler = SubstraitCompiler() + + input_fields: Field = [] + + for s in sources: + if type(s) == FeatureView: + fields = s.projection.features + else: + fields = s.features + + input_fields.extend( + [ + ( + f.name, + dt.dtype( + feast_value_type_to_pandas_type(f.dtype.to_value_type()) + ), + ) + for f in fields + ] + ) + + expr = user_function(ibis.table(input_fields, "t")) - transformation = OnDemandPandasTransformation(user_function, udf_string) + transformation = OnDemandSubstraitTransformation( + substrait_plan=compiler.compile(expr).SerializeToString() + ) + else: + udf_string = dill.source.getsource(user_function) + mainify(user_function) + transformation = OnDemandPandasTransformation(user_function, udf_string) on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, diff --git a/sdk/python/feast/on_demand_substrait_transformation.py b/sdk/python/feast/on_demand_substrait_transformation.py new file mode 100644 index 0000000000..4e92e77dc8 --- /dev/null +++ b/sdk/python/feast/on_demand_substrait_transformation.py @@ -0,0 +1,50 @@ +import pandas as pd +import pyarrow +import pyarrow.substrait as substrait # type: ignore # noqa + +from 
feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + OnDemandSubstraitTransformation as OnDemandSubstraitTransformationProto, +) + + +class OnDemandSubstraitTransformation: + def __init__(self, substrait_plan: bytes): + """ + Creates an OnDemandSubstraitTransformation object. + + Args: + substrait_plan: The user-provided substrait plan. + """ + self.substrait_plan = substrait_plan + + def transform(self, df: pd.DataFrame) -> pd.DataFrame: + def table_provider(names, schema: pyarrow.Schema): + return pyarrow.Table.from_pandas(df[schema.names]) + + table: pyarrow.Table = pyarrow.substrait.run_query( + self.substrait_plan, table_provider=table_provider + ).read_all() + return table.to_pandas() + + def __eq__(self, other): + if not isinstance(other, OnDemandSubstraitTransformation): + raise TypeError( + "Comparisons should only involve OnDemandSubstraitTransformation class objects." + ) + + if not super().__eq__(other): + return False + + return self.substrait_plan == other.substrait_plan + + def to_proto(self) -> OnDemandSubstraitTransformationProto: + return OnDemandSubstraitTransformationProto(substrait_plan=self.substrait_plan) + + @classmethod + def from_proto( + cls, + on_demand_substrait_transformation_proto: OnDemandSubstraitTransformationProto, + ): + return OnDemandSubstraitTransformation( + substrait_plan=on_demand_substrait_transformation_proto.substrait_plan + ) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index f20bc05df9..45c4eb765d 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -10,7 +10,7 @@ altair==4.2.2 # via great-expectations annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # jupyter-server @@ -34,6 +34,8 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis +atpublic==4.0 + # via ibis-framework attrs==23.2.0 # via # bowler @@ -55,15 +57,17 @@ 
babel==2.14.0 # sphinx beautifulsoup4==4.12.3 # via nbconvert +bidict==0.23.1 + # via ibis-framework black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.42 +boto3==1.34.49 # via # feast (setup.py) # moto -botocore==1.34.42 +botocore==1.34.49 # via # boto3 # moto @@ -122,7 +126,7 @@ comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.4.1 +coverage[toml]==7.4.3 # via pytest-cov cryptography==41.0.7 # via @@ -137,7 +141,7 @@ cryptography==41.0.7 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2024.2.0 +dask==2024.2.1 # via feast (setup.py) db-dtypes==1.2.0 # via google-cloud-bigquery @@ -213,9 +217,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.118.0 +google-api-python-client==2.119.0 # via firebase-admin -google-auth==2.27.0 +google-auth==2.28.1 # via # google-api-core # google-api-python-client @@ -226,10 +230,6 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery -google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) @@ -244,7 +244,7 @@ google-cloud-core==2.4.1 # google-cloud-storage google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.14.0 +google-cloud-firestore==2.15.0 # via firebase-admin google-cloud-storage==2.14.0 # via @@ -264,13 +264,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.8 +great-expectations==0.18.9 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # google-api-core @@ -282,15 +282,15 @@ grpcio==1.60.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.60.1 
+grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-status==1.60.1 +grpcio-status==1.62.0 # via google-api-core -grpcio-testing==1.60.1 +grpcio-testing==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -304,7 +304,7 @@ hazelcast-python-client==5.3.0 # via feast (setup.py) hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httplib2==0.22.0 # via @@ -312,11 +312,17 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via # feast (setup.py) # jupyterlab -identify==2.5.34 +ibis-framework==8.0.0 + # via + # feast (setup.py) + # ibis-substrait +ibis-substrait==3.2.0 + # via feast (setup.py) +identify==2.5.35 # via pre-commit idna==3.6 # via @@ -337,7 +343,7 @@ iniconfig==2.0.0 # via pytest ipykernel==6.29.2 # via jupyterlab -ipython==8.21.0 +ipython==8.22.1 # via # great-expectations # ipykernel @@ -367,7 +373,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.14 +json5==0.9.17 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -412,7 +418,7 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.1 +jupyterlab==4.1.2 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -428,6 +434,8 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations +markdown-it-py==3.0.0 + # via rich markupsafe==2.1.5 # via # jinja2 @@ -441,6 +449,8 @@ matplotlib-inline==0.1.6 # ipython mccabe==0.7.0 # via flake8 +mdurl==0.1.2 + # via markdown-it-py minio==7.1.0 # via feast (setup.py) mistune==3.0.2 @@ -455,7 +465,7 @@ moreorless==0.4.0 # via bowler moto==4.2.14 # via feast (setup.py) -msal==1.26.0 +msal==1.27.0 # via # azure-identity # msal-extensions @@ -463,6 +473,8 @@ msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via 
cachecontrol +multipledispatch==1.0.0 + # via ibis-framework multiprocess==0.70.16 # via bytewax mypy==1.8.0 @@ -477,7 +489,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.0 +nbconvert==7.16.1 # via jupyter-server nbformat==5.9.2 # via @@ -491,7 +503,7 @@ nodeenv==1.8.0 # via pre-commit notebook==7.1.0 # via great-expectations -notebook-shim==0.2.3 +notebook-shim==0.2.4 # via # jupyterlab # notebook @@ -501,6 +513,7 @@ numpy==1.24.4 # db-dtypes # feast (setup.py) # great-expectations + # ibis-framework # pandas # pandavro # pyarrow @@ -519,6 +532,7 @@ packaging==23.2 # google-cloud-bigquery # great-expectations # gunicorn + # ibis-substrait # ipykernel # jupyter-server # jupyterlab @@ -536,6 +550,7 @@ pandas==1.5.3 # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # pandavro # snowflake-connector-python pandavro==1.5.2 @@ -544,6 +559,8 @@ pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi +parsy==2.1 + # via ibis-framework partd==1.4.1 # via dask pathspec==0.12.1 @@ -552,7 +569,7 @@ pbr==6.0.0 # via mock pexpect==4.9.0 # via ipython -pip-tools==7.3.0 +pip-tools==7.4.0 # via feast (setup.py) platformdirs==3.11.0 # via @@ -580,7 +597,7 @@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # google-api-core @@ -598,6 +615,7 @@ protobuf==4.23.3 # grpcio-tools # mypy-protobuf # proto-plus + # substrait psutil==5.9.0 # via # feast (setup.py) @@ -621,7 +639,10 @@ pyarrow==15.0.0 # db-dtypes # feast (setup.py) # google-cloud-bigquery + # ibis-framework # snowflake-connector-python +pyarrow-hotfix==0.6 + # via ibis-framework pyasn1==0.5.1 # via # pyasn1-modules @@ -634,12 +655,12 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) # great-expectations -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via 
pydantic pyflakes==3.0.1 # via flake8 @@ -648,6 +669,7 @@ pygments==2.17.2 # feast (setup.py) # ipython # nbconvert + # rich # sphinx pyjwt[crypto]==2.8.0 # via @@ -659,14 +681,16 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==23.3.0 +pyopenssl==24.0.0 # via snowflake-connector-python pyparsing==3.1.1 # via # great-expectations # httplib2 pyproject-hooks==1.0.0 - # via build + # via + # build + # pip-tools pyspark==3.5.0 # via feast (setup.py) pytest==7.4.4 @@ -699,6 +723,7 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations + # ibis-framework # jupyter-client # kubernetes # moto @@ -712,6 +737,7 @@ python-json-logger==2.0.7 pytz==2024.1 # via # great-expectations + # ibis-framework # pandas # snowflake-connector-python # trino @@ -719,6 +745,7 @@ pyyaml==6.0.1 # via # dask # feast (setup.py) + # ibis-substrait # jupyter-events # kubernetes # pre-commit @@ -769,7 +796,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rockset==2.1.0 +rich==13.7.0 + # via ibis-framework +rockset==2.1.1 # via feast (setup.py) rpds-py==0.18.0 # via @@ -805,7 +834,7 @@ sniffio==1.3.0 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.7.0 +snowflake-connector-python[pandas]==3.7.1 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -826,13 +855,19 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy +sqlglot==20.11.0 + # via ibis-framework stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi +substrait==0.12.1 + # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) tenacity==8.2.3 @@ -865,6 +900,7 @@ toolz==0.12.1 # via # altair # dask + # ibis-framework # partd tornado==6.4 # via @@ -893,7 +929,7 @@ traitlets==5.14.1 # nbclient # nbconvert # nbformat 
-trino==0.327.0 +trino==0.328.0 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) @@ -913,11 +949,11 @@ types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) -types-redis==4.6.0.20240106 +types-redis==4.6.0.20240218 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.0.0.20240125 +types-setuptools==69.1.0.20240223 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) @@ -931,11 +967,13 @@ typing-extensions==4.9.0 # azure-storage-blob # fastapi # great-expectations + # ibis-framework # mypy # pydantic # pydantic-core # snowflake-connector-python # sqlalchemy2-stubs + # typeguard # uvicorn tzlocal==5.2 # via diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 3943662d01..7141cd0f25 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -6,7 +6,7 @@ # annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # starlette @@ -38,7 +38,7 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2024.2.0 +dask==2024.2.1 # via feast (setup.py) dill==0.3.8 # via feast (setup.py) @@ -56,17 +56,17 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.60.1 +grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -74,11 +74,11 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via feast (setup.py) idna==3.6 # via @@ -131,7 +131,7 @@ partd==1.4.1 # via dask 
proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # grpcio-health-checking @@ -141,11 +141,11 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) @@ -212,13 +212,12 @@ typing-extensions==4.9.0 # pydantic # pydantic-core # sqlalchemy2-stubs + # typeguard # uvicorn -urllib3==2.2.0 +urllib3==2.2.1 # via requests uvicorn[standard]==0.27.1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index afa43ec2a2..b006c1c621 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -4,14 +4,13 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt # - alabaster==0.7.13 # via sphinx altair==4.2.2 # via great-expectations annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # jupyter-server @@ -35,6 +34,8 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis +atpublic==3.1.2 + # via ibis-framework attrs==23.2.0 # via # bowler @@ -66,11 +67,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.42 +boto3==1.34.49 # via # feast (setup.py) # moto -botocore==1.34.42 +botocore==1.34.49 # via # boto3 # moto @@ -129,7 +130,7 @@ comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.4.1 +coverage[toml]==7.4.3 # via pytest-cov cryptography==41.0.7 # via @@ -219,9 +220,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.118.0 +google-api-python-client==2.119.0 # via firebase-admin -google-auth==2.27.0 
+google-auth==2.28.1 # via # google-api-core # google-api-python-client @@ -232,10 +233,6 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery -google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) @@ -250,7 +247,7 @@ google-cloud-core==2.4.1 # google-cloud-storage google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.14.0 +google-cloud-firestore==2.15.0 # via firebase-admin google-cloud-storage==2.14.0 # via @@ -270,13 +267,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.8 +great-expectations==0.18.9 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # google-api-core @@ -288,15 +285,15 @@ grpcio==1.60.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.60.1 +grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-status==1.60.1 +grpcio-status==1.62.0 # via google-api-core -grpcio-testing==1.60.1 +grpcio-testing==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -310,7 +307,7 @@ hazelcast-python-client==5.3.0 # via feast (setup.py) hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httplib2==0.22.0 # via @@ -318,11 +315,17 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via # feast (setup.py) # jupyterlab -identify==2.5.34 +ibis-framework==4.1.0 + # via + # feast (setup.py) + # ibis-substrait +ibis-substrait==2.29.1 + # via feast (setup.py) +identify==2.5.35 # via pre-commit 
idna==3.6 # via @@ -344,6 +347,7 @@ importlib-metadata==6.11.0 # jupyterlab-server # nbconvert # sphinx + # typeguard importlib-resources==6.1.1 # via # feast (setup.py) @@ -384,7 +388,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.14 +json5==0.9.17 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -429,7 +433,7 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.1 +jupyterlab==4.1.2 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -445,6 +449,8 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations +markdown-it-py==3.0.0 + # via rich markupsafe==2.1.5 # via # jinja2 @@ -458,6 +464,8 @@ matplotlib-inline==0.1.6 # ipython mccabe==0.7.0 # via flake8 +mdurl==0.1.2 + # via markdown-it-py minio==7.1.0 # via feast (setup.py) mistune==3.0.2 @@ -472,7 +480,7 @@ moreorless==0.4.0 # via bowler moto==4.2.14 # via feast (setup.py) -msal==1.26.0 +msal==1.27.0 # via # azure-identity # msal-extensions @@ -480,6 +488,8 @@ msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via cachecontrol +multipledispatch==0.6.0 + # via ibis-framework multiprocess==0.70.16 # via bytewax mypy==1.8.0 @@ -494,7 +504,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.0 +nbconvert==7.16.1 # via jupyter-server nbformat==5.9.2 # via @@ -508,7 +518,7 @@ nodeenv==1.8.0 # via pre-commit notebook==7.1.0 # via great-expectations -notebook-shim==0.2.3 +notebook-shim==0.2.4 # via # jupyterlab # notebook @@ -518,6 +528,7 @@ numpy==1.24.4 # db-dtypes # feast (setup.py) # great-expectations + # ibis-framework # pandas # pandavro # pyarrow @@ -536,6 +547,7 @@ packaging==23.2 # google-cloud-bigquery # great-expectations # gunicorn + # ibis-substrait # ipykernel # jupyter-server # jupyterlab @@ -553,6 +565,7 @@ pandas==1.5.3 # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # pandavro # snowflake-connector-python 
pandavro==1.5.2 @@ -561,6 +574,8 @@ pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi +parsy==2.1 + # via ibis-framework partd==1.4.1 # via dask pathspec==0.12.1 @@ -571,7 +586,7 @@ pexpect==4.9.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==7.3.0 +pip-tools==7.4.0 # via feast (setup.py) pkgutil-resolve-name==1.3.10 # via jsonschema @@ -601,7 +616,7 @@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # google-api-core @@ -617,6 +632,7 @@ protobuf==4.23.3 # grpcio-status # grpcio-testing # grpcio-tools + # ibis-substrait # mypy-protobuf # proto-plus psutil==5.9.0 @@ -655,12 +671,12 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) # great-expectations -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via pydantic pyflakes==3.0.1 # via flake8 @@ -669,6 +685,7 @@ pygments==2.17.2 # feast (setup.py) # ipython # nbconvert + # rich # sphinx pyjwt[crypto]==2.8.0 # via @@ -680,14 +697,16 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==23.3.0 +pyopenssl==24.0.0 # via snowflake-connector-python pyparsing==3.1.1 # via # great-expectations # httplib2 pyproject-hooks==1.0.0 - # via build + # via + # build + # pip-tools pyspark==3.5.0 # via feast (setup.py) pytest==7.4.4 @@ -720,6 +739,7 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations + # ibis-framework # jupyter-client # kubernetes # moto @@ -734,6 +754,7 @@ pytz==2024.1 # via # babel # great-expectations + # ibis-framework # pandas # snowflake-connector-python # trino @@ -741,6 +762,7 @@ pyyaml==6.0.1 # via # dask # feast (setup.py) + # ibis-substrait # jupyter-events # kubernetes # pre-commit @@ -791,7 +813,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rockset==2.1.0 +rich==13.7.0 + # via ibis-framework +rockset==2.1.1 # via 
feast (setup.py) rpds-py==0.18.0 # via @@ -819,6 +843,7 @@ six==1.16.0 # isodate # kubernetes # mock + # multipledispatch # pandavro # python-dateutil # rfc3339-validator @@ -829,7 +854,7 @@ sniffio==1.3.0 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.7.0 +snowflake-connector-python[pandas]==3.7.1 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -850,9 +875,13 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 # via sphinx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy +sqlglot==10.6.4 + # via ibis-framework stack-data==0.6.3 # via ipython starlette==0.36.3 @@ -889,6 +918,7 @@ toolz==0.12.1 # via # altair # dask + # ibis-framework # partd tornado==6.4 # via @@ -917,7 +947,7 @@ traitlets==5.14.1 # nbclient # nbconvert # nbformat -trino==0.327.0 +trino==0.328.0 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) @@ -937,11 +967,11 @@ types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) -types-redis==4.6.0.20240106 +types-redis==4.6.0.20240218 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.0.0.20240125 +types-setuptools==69.1.0.20240223 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) @@ -949,6 +979,7 @@ types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.9.0 # via + # annotated-types # anyio # async-lru # azure-core @@ -956,13 +987,16 @@ typing-extensions==4.9.0 # black # fastapi # great-expectations + # ibis-framework # ipython # mypy # pydantic # pydantic-core + # rich # snowflake-connector-python # sqlalchemy2-stubs # starlette + # typeguard # uvicorn tzlocal==5.2 # via diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 079064a9ec..541beecf0d 100644 --- 
a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -6,7 +6,7 @@ # annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # starlette @@ -56,17 +56,17 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.60.1 +grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -74,11 +74,11 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via feast (setup.py) idna==3.6 # via @@ -89,6 +89,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) + # typeguard importlib-resources==6.1.1 # via # feast (setup.py) @@ -136,7 +137,7 @@ pkgutil-resolve-name==1.3.10 # via jsonschema proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # grpcio-health-checking @@ -146,11 +147,11 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) @@ -184,7 +185,9 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -209,6 +212,7 @@ types-protobuf==4.24.0.20240129 # via mypy-protobuf typing-extensions==4.9.0 # via + # annotated-types # anyio # fastapi # mypy @@ -216,13 +220,12 @@ typing-extensions==4.9.0 # pydantic-core # sqlalchemy2-stubs # starlette + # typeguard # uvicorn -urllib3==2.2.0 +urllib3==2.2.1 # via 
requests uvicorn[standard]==0.27.1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6c26f889e2..e17d545d38 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -10,7 +10,7 @@ altair==4.2.2 # via great-expectations annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # jupyter-server @@ -34,6 +34,8 @@ async-lru==2.0.4 # via jupyterlab async-timeout==4.0.3 # via redis +atpublic==4.0 + # via ibis-framework attrs==23.2.0 # via # bowler @@ -55,15 +57,17 @@ babel==2.14.0 # sphinx beautifulsoup4==4.12.3 # via nbconvert +bidict==0.23.1 + # via ibis-framework black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.42 +boto3==1.34.49 # via # feast (setup.py) # moto -botocore==1.34.42 +botocore==1.34.49 # via # boto3 # moto @@ -122,7 +126,7 @@ comm==0.2.1 # via # ipykernel # ipywidgets -coverage[toml]==7.4.1 +coverage[toml]==7.4.3 # via pytest-cov cryptography==41.0.7 # via @@ -137,7 +141,7 @@ cryptography==41.0.7 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2024.2.0 +dask==2024.2.1 # via feast (setup.py) db-dtypes==1.2.0 # via google-cloud-bigquery @@ -213,9 +217,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.118.0 +google-api-python-client==2.119.0 # via firebase-admin -google-auth==2.27.0 +google-auth==2.28.1 # via # google-api-core # google-api-python-client @@ -226,10 +230,6 @@ google-auth==2.27.0 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery -google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast 
(setup.py) @@ -244,7 +244,7 @@ google-cloud-core==2.4.1 # google-cloud-storage google-cloud-datastore==2.19.0 # via feast (setup.py) -google-cloud-firestore==2.14.0 +google-cloud-firestore==2.15.0 # via firebase-admin google-cloud-storage==2.14.0 # via @@ -264,13 +264,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.8 +great-expectations==0.18.9 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # google-api-core @@ -282,15 +282,15 @@ grpcio==1.60.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.60.1 +grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-status==1.60.1 +grpcio-status==1.62.0 # via google-api-core -grpcio-testing==1.60.1 +grpcio-testing==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -304,7 +304,7 @@ hazelcast-python-client==5.3.0 # via feast (setup.py) hiredis==2.3.2 # via feast (setup.py) -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httplib2==0.22.0 # via @@ -312,11 +312,17 @@ httplib2==0.22.0 # google-auth-httplib2 httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via # feast (setup.py) # jupyterlab -identify==2.5.34 +ibis-framework==8.0.0 + # via + # feast (setup.py) + # ibis-substrait +ibis-substrait==3.2.0 + # via feast (setup.py) +identify==2.5.35 # via pre-commit idna==3.6 # via @@ -338,6 +344,7 @@ importlib-metadata==6.11.0 # jupyterlab-server # nbconvert # sphinx + # typeguard importlib-resources==6.1.1 # via feast (setup.py) iniconfig==2.0.0 @@ -374,7 +381,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.14 +json5==0.9.17 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -419,7 +426,7 @@ 
jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.1 +jupyterlab==4.1.2 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -435,6 +442,8 @@ locket==1.0.0 # via partd makefun==1.15.2 # via great-expectations +markdown-it-py==3.0.0 + # via rich markupsafe==2.1.5 # via # jinja2 @@ -448,6 +457,8 @@ matplotlib-inline==0.1.6 # ipython mccabe==0.7.0 # via flake8 +mdurl==0.1.2 + # via markdown-it-py minio==7.1.0 # via feast (setup.py) mistune==3.0.2 @@ -462,7 +473,7 @@ moreorless==0.4.0 # via bowler moto==4.2.14 # via feast (setup.py) -msal==1.26.0 +msal==1.27.0 # via # azure-identity # msal-extensions @@ -470,6 +481,8 @@ msal-extensions==1.1.0 # via azure-identity msgpack==1.0.7 # via cachecontrol +multipledispatch==1.0.0 + # via ibis-framework multiprocess==0.70.16 # via bytewax mypy==1.8.0 @@ -484,7 +497,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.0 +nbconvert==7.16.1 # via jupyter-server nbformat==5.9.2 # via @@ -498,7 +511,7 @@ nodeenv==1.8.0 # via pre-commit notebook==7.1.0 # via great-expectations -notebook-shim==0.2.3 +notebook-shim==0.2.4 # via # jupyterlab # notebook @@ -508,6 +521,7 @@ numpy==1.24.4 # db-dtypes # feast (setup.py) # great-expectations + # ibis-framework # pandas # pandavro # pyarrow @@ -526,6 +540,7 @@ packaging==23.2 # google-cloud-bigquery # great-expectations # gunicorn + # ibis-substrait # ipykernel # jupyter-server # jupyterlab @@ -543,6 +558,7 @@ pandas==1.5.3 # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # pandavro # snowflake-connector-python pandavro==1.5.2 @@ -551,6 +567,8 @@ pandocfilters==1.5.1 # via nbconvert parso==0.8.3 # via jedi +parsy==2.1 + # via ibis-framework partd==1.4.1 # via dask pathspec==0.12.1 @@ -559,7 +577,7 @@ pbr==6.0.0 # via mock pexpect==4.9.0 # via ipython -pip-tools==7.3.0 +pip-tools==7.4.0 # via feast (setup.py) platformdirs==3.11.0 # via @@ -587,7 +605,7 
@@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # google-api-core @@ -605,6 +623,7 @@ protobuf==4.23.3 # grpcio-tools # mypy-protobuf # proto-plus + # substrait psutil==5.9.0 # via # feast (setup.py) @@ -628,7 +647,10 @@ pyarrow==15.0.0 # db-dtypes # feast (setup.py) # google-cloud-bigquery + # ibis-framework # snowflake-connector-python +pyarrow-hotfix==0.6 + # via ibis-framework pyasn1==0.5.1 # via # pyasn1-modules @@ -641,12 +663,12 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) # great-expectations -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via pydantic pyflakes==3.0.1 # via flake8 @@ -655,6 +677,7 @@ pygments==2.17.2 # feast (setup.py) # ipython # nbconvert + # rich # sphinx pyjwt[crypto]==2.8.0 # via @@ -666,14 +689,16 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==23.3.0 +pyopenssl==24.0.0 # via snowflake-connector-python pyparsing==3.1.1 # via # great-expectations # httplib2 pyproject-hooks==1.0.0 - # via build + # via + # build + # pip-tools pyspark==3.5.0 # via feast (setup.py) pytest==7.4.4 @@ -706,6 +731,7 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations + # ibis-framework # jupyter-client # kubernetes # moto @@ -719,6 +745,7 @@ python-json-logger==2.0.7 pytz==2024.1 # via # great-expectations + # ibis-framework # pandas # snowflake-connector-python # trino @@ -726,6 +753,7 @@ pyyaml==6.0.1 # via # dask # feast (setup.py) + # ibis-substrait # jupyter-events # kubernetes # pre-commit @@ -776,7 +804,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rockset==2.1.0 +rich==13.7.0 + # via ibis-framework +rockset==2.1.1 # via feast (setup.py) rpds-py==0.18.0 # via @@ -814,7 +844,7 @@ sniffio==1.3.0 # httpx snowballstemmer==2.2.0 # via sphinx 
-snowflake-connector-python[pandas]==3.7.0 +snowflake-connector-python[pandas]==3.7.1 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -835,13 +865,19 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy +sqlglot==20.11.0 + # via ibis-framework stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi +substrait==0.12.1 + # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) tenacity==8.2.3 @@ -874,6 +910,7 @@ toolz==0.12.1 # via # altair # dask + # ibis-framework # partd tornado==6.4 # via @@ -902,7 +939,7 @@ traitlets==5.14.1 # nbclient # nbconvert # nbformat -trino==0.327.0 +trino==0.328.0 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) @@ -922,11 +959,11 @@ types-pytz==2024.1.0.20240203 # via feast (setup.py) types-pyyaml==6.0.12.12 # via feast (setup.py) -types-redis==4.6.0.20240106 +types-redis==4.6.0.20240218 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.0.0.20240125 +types-setuptools==69.1.0.20240223 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) @@ -941,6 +978,7 @@ typing-extensions==4.9.0 # black # fastapi # great-expectations + # ibis-framework # ipython # mypy # pydantic @@ -948,6 +986,7 @@ typing-extensions==4.9.0 # snowflake-connector-python # sqlalchemy2-stubs # starlette + # typeguard # uvicorn tzlocal==5.2 # via diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 182cb7ad07..12c14a03ae 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -6,7 +6,7 @@ # annotated-types==0.6.0 # via pydantic -anyio==4.2.0 +anyio==4.3.0 # via # httpx # starlette @@ -38,7 +38,7 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast 
(setup.py) -dask==2024.2.0 +dask==2024.2.1 # via feast (setup.py) dill==0.3.8 # via feast (setup.py) @@ -56,17 +56,17 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.60.1 +grpcio==1.62.0 # via # feast (setup.py) # grpcio-health-checking # grpcio-reflection # grpcio-tools -grpcio-health-checking==1.60.1 +grpcio-health-checking==1.62.0 # via feast (setup.py) -grpcio-reflection==1.60.1 +grpcio-reflection==1.62.0 # via feast (setup.py) -grpcio-tools==1.60.1 +grpcio-tools==1.62.0 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -74,11 +74,11 @@ h11==0.14.0 # via # httpcore # uvicorn -httpcore==1.0.3 +httpcore==1.0.4 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.26.0 +httpx==0.27.0 # via feast (setup.py) idna==3.6 # via @@ -89,6 +89,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) + # typeguard importlib-resources==6.1.1 # via feast (setup.py) jinja2==3.1.3 @@ -131,7 +132,7 @@ partd==1.4.1 # via dask proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.3 +protobuf==4.23.4 # via # feast (setup.py) # grpcio-health-checking @@ -141,11 +142,11 @@ protobuf==4.23.3 # proto-plus pyarrow==15.0.0 # via feast (setup.py) -pydantic==2.6.1 +pydantic==2.6.2 # via # fastapi # feast (setup.py) -pydantic-core==2.16.2 +pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) @@ -179,7 +180,9 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.51 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -211,8 +214,9 @@ typing-extensions==4.9.0 # pydantic-core # sqlalchemy2-stubs # starlette + # typeguard # uvicorn -urllib3==2.2.0 +urllib3==2.2.1 # via requests uvicorn[standard]==0.27.1 # via feast (setup.py) diff --git a/sdk/python/tests/unit/test_on_demand_substrait_transformation.py b/sdk/python/tests/unit/test_on_demand_substrait_transformation.py new file mode 100644 index 0000000000..c9d30c5b7a --- /dev/null +++ 
b/sdk/python/tests/unit/test_on_demand_substrait_transformation.py @@ -0,0 +1,112 @@ +import os +import tempfile +from datetime import datetime, timedelta + +import pandas as pd + +from feast import Entity, FeatureStore, FeatureView, FileSource, RepoConfig +from feast.driver_test_data import create_driver_hourly_stats_df +from feast.field import Field +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + + +def test_ibis_pandas_parity(): + with tempfile.TemporaryDirectory() as data_dir: + store = FeatureStore( + config=RepoConfig( + project="test_on_demand_substrait_transformation", + registry=os.path.join(data_dir, "registry.db"), + provider="local", + entity_key_serialization_version=2, + online_store=SqliteOnlineStoreConfig( + path=os.path.join(data_dir, "online.db") + ), + ) + ) + + # Generate test data. + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) + + driver = Entity(name="driver", join_keys=["driver_id"]) + + driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path=driver_stats_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + ) + + @on_demand_feature_view( + sources=[driver_stats_fv], + schema=[Field(name="conv_rate_plus_acc", 
dtype=Float64)], + ) + def pandas_view(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_acc"] = inputs["conv_rate"] + inputs["acc_rate"] + return df + + from ibis.expr.types import Table + + @on_demand_feature_view( + sources=[driver_stats_fv[["conv_rate", "acc_rate"]]], + schema=[Field(name="conv_rate_plus_acc_substrait", dtype=Float64)], + ) + def substrait_view(inputs: Table) -> Table: + return inputs.select( + (inputs["conv_rate"] + inputs["acc_rate"]).name( + "conv_rate_plus_acc_substrait" + ) + ) + + store.apply( + [driver, driver_stats_source, driver_stats_fv, substrait_view, pandas_view] + ) + + entity_df = pd.DataFrame.from_dict( + { + # entity's join key -> entity values + "driver_id": [1001, 1002, 1003], + # "event_timestamp" (reserved key) -> timestamps + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + } + ) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "substrait_view:conv_rate_plus_acc_substrait", + "pandas_view:conv_rate_plus_acc", + ], + ).to_df() + + assert training_df["conv_rate_plus_acc"].equals( + training_df["conv_rate_plus_acc_substrait"] + ) diff --git a/setup.py b/setup.py index c14d64557a..3aaaa51ea0 100644 --- a/setup.py +++ b/setup.py @@ -144,6 +144,11 @@ "hazelcast-python-client>=5.1", ] +IBIS_REQUIRED = [ + "ibis-framework", + "ibis-substrait" +] + CI_REQUIRED = ( [ "build", @@ -201,6 +206,7 @@ + AZURE_REQUIRED + ROCKSET_REQUIRED + HAZELCAST_REQUIRED + + IBIS_REQUIRED ) @@ -368,6 +374,7 @@ def run(self): "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, "rockset": ROCKSET_REQUIRED, + "ibis": IBIS_REQUIRED }, include_package_data=True, license="Apache", From cf06704bd58c77931679f1c0c7e44de7042f931f Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Fabio=20Badal=C3=AC?= Date: Sat, 24 Feb 2024 23:14:08 +0100 Subject: [PATCH 046/122] fix: Fix copy method for StreamFeatureView (#3951) Signed-off-by: fbad --- sdk/python/feast/stream_feature_view.py | 6 ++++-- sdk/python/tests/unit/test_feature_views.py | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index 6a204d6813..13abbc5e28 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -284,7 +284,6 @@ def __copy__(self): fv = StreamFeatureView( name=self.name, schema=self.schema, - entities=self.entities, ttl=self.ttl, tags=self.tags, online=self.online, @@ -293,9 +292,12 @@ def __copy__(self): aggregations=self.aggregations, mode=self.mode, timestamp_field=self.timestamp_field, - source=self.source, + source=self.stream_source if self.stream_source else self.batch_source, udf=self.udf, ) + fv.entities = self.entities + fv.features = copy.copy(self.features) + fv.entity_columns = copy.copy(self.entity_columns) fv.projection = copy.copy(self.projection) return fv diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index 20863645b7..2ad9680703 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -1,3 +1,4 @@ +import copy from datetime import timedelta import pytest @@ -300,3 +301,22 @@ def test_stream_feature_view_proto_type(): aggregations=[], ) assert sfv.proto_class is StreamFeatureViewProto + + +def test_stream_feature_view_copy(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + sfv = StreamFeatureView( + name="test stream featureview proto class", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + 
aggregations=[], + ) + assert sfv == copy.copy(sfv) From b023aa5817bffe235f460c5df879141bb5945edb Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Wed, 28 Feb 2024 14:20:24 -0500 Subject: [PATCH 047/122] fix: Quickstart notebook example (#3976) fix: quickstart notebook example Removed unnecessary comma in notebook Signed-off-by: Theodor Mihalache --- examples/quickstart/quickstart.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/quickstart/quickstart.ipynb b/examples/quickstart/quickstart.ipynb index f84457ac02..9e9a0b27ca 100644 --- a/examples/quickstart/quickstart.ipynb +++ b/examples/quickstart/quickstart.ipynb @@ -1065,7 +1065,7 @@ "\n", "- Read the [Concepts](https://docs.feast.dev/getting-started/concepts/) page to understand the Feast data model and architecture.\n", "- Check out our [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview) section for more examples on how to use Feast.\n", - "- Follow our [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) guide for a more in-depth tutorial on using Feast.\n", + "- Follow our [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) guide for a more in-depth tutorial on using Feast.\n" ] } ], From 42f37bb85f97d5244556e307ababcda0f22660b3 Mon Sep 17 00:00:00 2001 From: Harry Date: Thu, 29 Feb 2024 02:26:43 +0700 Subject: [PATCH 048/122] chore: Bump CI requirement cryptography to v42 (#3966) --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- setup.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 45c4eb765d..39d33caa39 100644 --- 
a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -128,7 +128,7 @@ comm==0.2.1 # ipywidgets coverage[toml]==7.4.3 # via pytest-cov -cryptography==41.0.7 +cryptography==42.0.4 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index b006c1c621..0356b85973 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -132,7 +132,7 @@ comm==0.2.1 # ipywidgets coverage[toml]==7.4.3 # via pytest-cov -cryptography==41.0.7 +cryptography==42.0.4 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index e17d545d38..6515779481 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -128,7 +128,7 @@ comm==0.2.1 # ipywidgets coverage[toml]==7.4.3 # via pytest-cov -cryptography==41.0.7 +cryptography==42.0.4 # via # azure-identity # azure-storage-blob diff --git a/setup.py b/setup.py index 3aaaa51ea0..57decb8301 100644 --- a/setup.py +++ b/setup.py @@ -153,7 +153,7 @@ [ "build", "virtualenv==20.23.0", - "cryptography>=35.0,<42", + "cryptography>=35.0,<43", "flake8>=6.0.0,<6.1.0", "black>=22.6.0,<23", "isort>=5,<6", From 1e64c77e1e146f952f450db9370e2da5c85a8500 Mon Sep 17 00:00:00 2001 From: John Lemmon <137814163+JohnLemmonMedely@users.noreply.github.com> Date: Thu, 29 Feb 2024 07:49:52 -0600 Subject: [PATCH 049/122] fix: Fix for materializing entityless feature views in Snowflake (#3961) --- .../infra/materialization/snowflake_engine.py | 12 ++-- .../materialization/test_snowflake.py | 62 +++++++++++++++++++ 2 files changed, 68 insertions(+), 6 deletions(-) diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py 
b/sdk/python/feast/infra/materialization/snowflake_engine.py index 62b23dfade..28bec198a5 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -14,7 +14,7 @@ import feast from feast.batch_feature_view import BatchFeatureView from feast.entity import Entity -from feast.feature_view import FeatureView +from feast.feature_view import DUMMY_ENTITY_ID, FeatureView from feast.infra.materialization.batch_materialization_engine import ( BatchMaterializationEngine, MaterializationJob, @@ -274,7 +274,11 @@ def _materialize_one( fv_latest_values_sql = offline_job.to_sql() - if feature_view.entity_columns: + if ( + feature_view.entity_columns[0].name == DUMMY_ENTITY_ID + ): # entityless Feature View's placeholder entity + entities_to_write = 1 + else: join_keys = [entity.name for entity in feature_view.entity_columns] unique_entities = '"' + '", "'.join(join_keys) + '"' @@ -287,10 +291,6 @@ def _materialize_one( with GetSnowflakeConnection(self.repo_config.offline_store) as conn: entities_to_write = conn.cursor().execute(query).fetchall()[0][0] - else: - entities_to_write = ( - 1 # entityless feature view has a placeholder entity - ) if feature_view.batch_source.field_mapping is not None: fv_latest_mapped_values_sql = _run_snowflake_field_mapping( diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py index daa96a87c9..60fa9b30aa 100644 --- a/sdk/python/tests/integration/materialization/test_snowflake.py +++ b/sdk/python/tests/integration/materialization/test_snowflake.py @@ -185,3 +185,65 @@ def test_snowflake_materialization_consistency_internal_with_lists( finally: fs.teardown() snowflake_environment.data_source_creator.teardown() + + +@pytest.mark.integration +def test_snowflake_materialization_entityless_fv(): + snowflake_config = IntegrationTestRepoConfig( + online_store=SNOWFLAKE_ONLINE_CONFIG, + 
offline_store_creator=SnowflakeDataSourceCreator, + batch_engine=SNOWFLAKE_ENGINE_CONFIG, + ) + snowflake_environment = construct_test_environment(snowflake_config, None) + + df = create_basic_driver_dataset() + entityless_df = df.drop("driver_id", axis=1) + ds = snowflake_environment.data_source_creator.create_data_source( + entityless_df, + snowflake_environment.feature_store.project, + field_mapping={"ts_1": "ts"}, + ) + + fs = snowflake_environment.feature_store + + # We include the driver entity so we can provide an entity ID when fetching features + driver = Entity( + name="driver_id", + join_keys=["driver_id"], + ) + + overall_stats_fv = FeatureView( + name="overall_hourly_stats", + entities=[], + ttl=timedelta(weeks=52), + source=ds, + ) + + try: + fs.apply([overall_stats_fv, driver]) + + # materialization is run in two steps and + # we use timestamp from generated dataframe as a split point + split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) + + print(f"Split datetime: {split_dt}") + + now = datetime.utcnow() + + start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + end_date = split_dt + fs.materialize( + feature_views=[overall_stats_fv.name], + start_date=start_date, + end_date=end_date, + ) + + response_dict = fs.get_online_features( + [f"{overall_stats_fv.name}:value"], + [{"driver_id": 1}], # Included because we need an entity + ).to_dict() + assert response_dict["value"] == [0.3] + + finally: + fs.teardown() + snowflake_environment.data_source_creator.teardown() From 591ba4e39842b5fbb49db32be4fce28e6d520d93 Mon Sep 17 00:00:00 2001 From: Jeremy Ary Date: Thu, 29 Feb 2024 10:47:52 -0600 Subject: [PATCH 050/122] fix: Restore label check for all actions using pull_request_target (#3978) Signed-off-by: Jeremy Ary --- .github/workflows/java_pr.yml | 24 ++++++++++++++----- .github/workflows/lint_pr.yml | 6 ++++- .github/workflows/pr_integration_tests.yml | 4 ++-- .../workflows/pr_local_integration_tests.yml | 6 ++--- 4 files changed, 
28 insertions(+), 12 deletions(-) diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index 83c52e7dbf..d362a6bb54 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -9,7 +9,11 @@ on: jobs: lint-java: - if: github.repository == 'feast-dev/feast' + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -23,7 +27,11 @@ jobs: run: make lint-java unit-test-java: - if: github.repository == 'feast-dev/feast' + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' runs-on: ubuntu-latest needs: lint-java steps: @@ -60,7 +68,11 @@ jobs: path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/ build-docker-image-java: - if: github.repository == 'feast-dev/feast' + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. 
+ if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' runs-on: ubuntu-latest strategy: matrix: @@ -91,10 +103,10 @@ jobs: run: make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} integration-test-java-pr: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: - ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || - (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved')))) && + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && github.repository == 'feast-dev/feast' runs-on: ubuntu-latest needs: unit-test-java diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml index f9af8b27c7..12f7182ce8 100644 --- a/.github/workflows/lint_pr.yml +++ b/.github/workflows/lint_pr.yml @@ -9,7 +9,11 @@ on: jobs: validate-title: - if: github.repository == 'feast-dev/feast' + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 
'approved' for security purposes. + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' name: Validate PR title runs-on: ubuntu-latest steps: diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 26c85b0126..ba4169c292 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -14,7 +14,7 @@ on: jobs: build-docker-image: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && @@ -76,7 +76,7 @@ jobs: outputs: DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} integration-test-python: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. 
if: ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index aeb4100dc8..668bcb5e50 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -10,11 +10,11 @@ on: jobs: integration-test-python-local: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || - (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) || - github.repository != 'feast-dev/feast' + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' runs-on: ${{ matrix.os }} strategy: fail-fast: false From 64459ad1b5ed4a782b7ce87fcec3012e00408c74 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 4 Mar 2024 20:41:36 +0700 Subject: [PATCH 051/122] feat: Pandas v2 compatibility (#3957) * feat: Support pandas v2 Signed-off-by: Hai Nguyen * fix: Prune dependencies Signed-off-by: Hai Nguyen * 
chore: Re-compile reqs py310 Signed-off-by: Hai Nguyen * fix: Mark test skip with conditions Signed-off-by: Hai Nguyen * chore: Re-compile reqs py39 Signed-off-by: Hai Nguyen * chore: Update skip reason Signed-off-by: Hai Nguyen * chore: Re-compile reqs py38 Signed-off-by: Hai Nguyen * chore: Bump snowflake connector Signed-off-by: Hai Nguyen * chore: Remove test skip Signed-off-by: Hai Nguyen --------- Signed-off-by: Hai Nguyen --- sdk/python/feast/infra/offline_stores/file.py | 8 ++++++-- .../requirements/py3.10-ci-requirements.txt | 14 +++----------- sdk/python/requirements/py3.10-requirements.txt | 15 ++------------- sdk/python/requirements/py3.8-ci-requirements.txt | 11 +---------- sdk/python/requirements/py3.8-requirements.txt | 15 ++------------- sdk/python/requirements/py3.9-ci-requirements.txt | 14 +++----------- sdk/python/requirements/py3.9-requirements.txt | 15 ++------------- .../tests/integration/e2e/test_validation.py | 2 +- .../test_universal_historical_retrieval.py | 5 +++++ setup.py | 6 ++---- 10 files changed, 27 insertions(+), 78 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 0e5064ba78..0b873a2091 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -4,6 +4,7 @@ from pathlib import Path from typing import Any, Callable, List, Literal, Optional, Tuple, Union +import dask import dask.dataframe as dd import pandas as pd import pyarrow @@ -42,6 +43,11 @@ _run_dask_field_mapping, ) +# FileRetrievalJob will cast string objects to string[pyarrow] from dask version 2023.7.1 +# This is not the desired behavior for our use case, so we set the convert-string option to False +# See (https://github.com/dask/dask/issues/10881#issuecomment-1923327936) +dask.config.set({"dataframe.convert-string": False}) + class FileOfflineStoreConfig(FeastConfigBaseModel): """Offline store config for local (file-based) store""" @@ -366,8 
+372,6 @@ def evaluate_offline_job(): source_df[DUMMY_ENTITY_ID] = DUMMY_ENTITY_VAL columns_to_extract.add(DUMMY_ENTITY_ID) - source_df = source_df.persist() - return source_df[list(columns_to_extract)].persist() # When materializing a single feature view, we don't need full feature names. On demand transforms aren't materialized diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 39d33caa39..e41706f403 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -179,10 +179,6 @@ executing==2.0.1 # via stack-data fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 @@ -515,7 +511,6 @@ numpy==1.24.4 # great-expectations # ibis-framework # pandas - # pandavro # pyarrow # scipy oauthlib==3.2.2 @@ -543,18 +538,14 @@ packaging==23.2 # pytest # snowflake-connector-python # sphinx -pandas==1.5.3 +pandas==2.2.0 ; python_version >= "3.9" # via # altair # db-dtypes # feast (setup.py) # google-cloud-bigquery # great-expectations - # ibis-framework - # pandavro # snowflake-connector-python -pandavro==1.5.2 - # via feast (setup.py) pandocfilters==1.5.1 # via nbconvert parso==0.8.3 @@ -824,7 +815,6 @@ six==1.16.0 # isodate # kubernetes # mock - # pandavro # python-dateutil # rfc3339-validator # thriftpy2 @@ -975,6 +965,8 @@ typing-extensions==4.9.0 # sqlalchemy2-stubs # typeguard # uvicorn +tzdata==2024.1 + # via pandas tzlocal==5.2 # via # great-expectations diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 7141cd0f25..b5dd9a78be 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -46,10 +46,6 @@ exceptiongroup==1.2.0 # via anyio fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - 
# pandavro fissix==21.11.13 # via bowler fsspec==2024.2.0 @@ -115,17 +111,12 @@ numpy==1.24.4 # via # feast (setup.py) # pandas - # pandavro # pyarrow packaging==23.2 # via # dask # gunicorn -pandas==1.5.3 - # via - # feast (setup.py) - # pandavro -pandavro==1.5.2 +pandas==2.2.0 # via feast (setup.py) partd==1.4.1 # via dask @@ -171,9 +162,7 @@ rpds-py==0.18.0 # jsonschema # referencing six==1.16.0 - # via - # pandavro - # python-dateutil + # via python-dateutil sniffio==1.3.0 # via # anyio diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 0356b85973..339a6b1c49 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -182,10 +182,6 @@ executing==2.0.1 # via stack-data fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 @@ -530,7 +526,6 @@ numpy==1.24.4 # great-expectations # ibis-framework # pandas - # pandavro # pyarrow # scipy oauthlib==3.2.2 @@ -558,18 +553,14 @@ packaging==23.2 # pytest # snowflake-connector-python # sphinx -pandas==1.5.3 +pandas==1.5.3 ; python_version < "3.9" # via # altair # db-dtypes # feast (setup.py) # google-cloud-bigquery # great-expectations - # ibis-framework - # pandavro # snowflake-connector-python -pandavro==1.5.2 - # via feast (setup.py) pandocfilters==1.5.1 # via nbconvert parso==0.8.3 diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 541beecf0d..d00a77ee6f 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -46,10 +46,6 @@ exceptiongroup==1.2.0 # via anyio fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro fissix==21.11.13 # via bowler fsspec==2024.2.0 @@ -119,17 +115,12 @@ numpy==1.24.4 # via # feast (setup.py) # pandas 
- # pandavro # pyarrow packaging==23.2 # via # dask # gunicorn -pandas==1.5.3 - # via - # feast (setup.py) - # pandavro -pandavro==1.5.2 +pandas==2.0.3 # via feast (setup.py) partd==1.4.1 # via dask @@ -177,9 +168,7 @@ rpds-py==0.18.0 # jsonschema # referencing six==1.16.0 - # via - # pandavro - # python-dateutil + # via python-dateutil sniffio==1.3.0 # via # anyio diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6515779481..99f7ee0285 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -179,10 +179,6 @@ executing==2.0.1 # via stack-data fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # pandavro fastjsonschema==2.19.1 # via nbformat filelock==3.13.1 @@ -523,7 +519,6 @@ numpy==1.24.4 # great-expectations # ibis-framework # pandas - # pandavro # pyarrow # scipy oauthlib==3.2.2 @@ -551,18 +546,14 @@ packaging==23.2 # pytest # snowflake-connector-python # sphinx -pandas==1.5.3 +pandas==2.2.0 # via # altair # db-dtypes # feast (setup.py) # google-cloud-bigquery # great-expectations - # ibis-framework - # pandavro # snowflake-connector-python -pandavro==1.5.2 - # via feast (setup.py) pandocfilters==1.5.1 # via nbconvert parso==0.8.3 @@ -834,7 +825,6 @@ six==1.16.0 # isodate # kubernetes # mock - # pandavro # python-dateutil # rfc3339-validator # thriftpy2 @@ -988,6 +978,8 @@ typing-extensions==4.9.0 # starlette # typeguard # uvicorn +tzdata==2024.1 + # via pandas tzlocal==5.2 # via # great-expectations diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 12c14a03ae..4364dc62bf 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -46,10 +46,6 @@ exceptiongroup==1.2.0 # via anyio fastapi==0.109.2 # via feast (setup.py) -fastavro==1.9.4 - # via - # feast (setup.py) - # 
pandavro fissix==21.11.13 # via bowler fsspec==2024.2.0 @@ -116,17 +112,12 @@ numpy==1.24.4 # via # feast (setup.py) # pandas - # pandavro # pyarrow packaging==23.2 # via # dask # gunicorn -pandas==1.5.3 - # via - # feast (setup.py) - # pandavro -pandavro==1.5.2 +pandas==2.2.0 # via feast (setup.py) partd==1.4.1 # via dask @@ -172,9 +163,7 @@ rpds-py==0.18.0 # jsonschema # referencing six==1.16.0 - # via - # pandavro - # python-dateutil + # via python-dateutil sniffio==1.3.0 # via # anyio diff --git a/sdk/python/tests/integration/e2e/test_validation.py b/sdk/python/tests/integration/e2e/test_validation.py index f49ed80a26..fdf182be57 100644 --- a/sdk/python/tests/integration/e2e/test_validation.py +++ b/sdk/python/tests/integration/e2e/test_validation.py @@ -167,7 +167,7 @@ def test_logged_features_validation(environment, universal_data_sources): { "customer_id": 2000 + i, "driver_id": 6000 + i, - "event_timestamp": datetime.datetime.now(), + "event_timestamp": make_tzaware(datetime.datetime.now()), } ] ), diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 0abb290563..9baba2397b 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -340,6 +340,11 @@ def test_historical_features_with_entities_from_query( table_from_sql_entities = job_from_sql.to_arrow().to_pandas() for col in table_from_sql_entities.columns: + # check if col dtype is timezone naive + if pd.api.types.is_datetime64_dtype(table_from_sql_entities[col]): + table_from_sql_entities[col] = table_from_sql_entities[col].dt.tz_localize( + "UTC" + ) expected_df_query[col] = expected_df_query[col].astype( table_from_sql_entities[col].dtype ) diff --git a/setup.py b/setup.py index 57decb8301..484024b206 100644 --- a/setup.py +++ b/setup.py @@ -44,7 
+44,6 @@ "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill~=0.3.0", - "fastavro>=1.1.0,<2", "grpcio>=1.56.2,<2", "grpcio-tools>=1.56.2,<2", "grpcio-reflection>=1.56.2,<2", @@ -54,9 +53,7 @@ "jsonschema", "mmh3", "numpy>=1.22,<1.25", - "pandas>=1.4.3,<2", - # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. - "pandavro~=1.5.0", + "pandas>=1.4.3,<3", # Higher than 4.23.4 seems to cause a seg fault "protobuf<4.23.4,>3.20", "proto-plus>=1.20.0,<2", @@ -190,6 +187,7 @@ "types-setuptools", "types-tabulate", "virtualenv<20.24.2", + "pandas>=1.4.3,<2; python_version < '3.9'", ] + GCP_REQUIRED + REDIS_REQUIRED From 03dae13aa60c072b171c7f21d4e795eaaad18e55 Mon Sep 17 00:00:00 2001 From: Jiwon Park Date: Tue, 5 Mar 2024 01:20:50 +0900 Subject: [PATCH 052/122] feat: Added delete_table to redis online store (#3857) * feat: Added delete_table to redis online store Signed-off-by: Jiwon Park * fix: Fix hdel params to *args Signed-off-by: Jiwon Park --------- Signed-off-by: Jiwon Park --- sdk/python/feast/infra/online_stores/redis.py | 52 ++++++++++++++++--- 1 file changed, 44 insertions(+), 8 deletions(-) diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index ad84e8db7c..6f6c2fb45c 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -106,6 +106,39 @@ def delete_entity_values(self, config: RepoConfig, join_keys: List[str]): logger.debug(f"Deleted {deleted_count} rows for entity {', '.join(join_keys)}") + def delete_table(self, config: RepoConfig, table: FeatureView): + """ + Delete all rows in Redis for a specific feature view + + Args: + config: Feast config + table: Feature view to delete + """ + client = self._get_client(config.online_store) + deleted_count = 0 + prefix = _redis_key_prefix(table.join_keys) + + redis_hash_keys = [_mmh3(f"{table.name}:{f.name}") for f in table.features] + 
redis_hash_keys.append(bytes(f"_ts:{table.name}", "utf8")) + + with client.pipeline(transaction=False) as pipe: + for _k in client.scan_iter( + b"".join([prefix, b"*", config.project.encode("utf8")]) + ): + _tables = { + _hk[4:] for _hk in client.hgetall(_k) if _hk.startswith(b"_ts:") + } + if bytes(table.name, "utf8") not in _tables: + continue + if len(_tables) == 1: + pipe.delete(_k) + else: + pipe.hdel(_k, *redis_hash_keys) + deleted_count += 1 + pipe.execute() + + logger.debug(f"Deleted {deleted_count} rows for feature view {table.name}") + @log_exceptions_and_usage(online_store="redis") def update( self, @@ -117,16 +150,19 @@ def update( partial: bool, ): """ - Look for join_keys (list of entities) that are not in use anymore - (usually this happens when the last feature view that was using specific compound key is deleted) - and remove all features attached to this "join_keys". + Delete data from feature views that are no longer in use. + + Args: + config: Feast config + tables_to_delete: Feature views to delete + tables_to_keep: Feature views to keep + entities_to_delete: Entities to delete + entities_to_keep: Entities to keep + partial: Whether to do a partial update """ - join_keys_to_keep = set(tuple(table.join_keys) for table in tables_to_keep) - join_keys_to_delete = set(tuple(table.join_keys) for table in tables_to_delete) - - for join_keys in join_keys_to_delete - join_keys_to_keep: - self.delete_entity_values(config, list(join_keys)) + for table in tables_to_delete: + self.delete_table(config, table) def teardown( self, From 161547b167c7a9b2d53517d498acbe50d9298a40 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 5 Mar 2024 23:33:59 +0400 Subject: [PATCH 053/122] feat: Add duckdb offline store (#3981) * add ibis and duckdb offline stores Signed-off-by: tokoko * add linter ignore rule in ibis Signed-off-by: tokoko * add linter ignore rule in ibis Signed-off-by: tokoko --------- Signed-off-by: tokoko --- Makefile | 13 + 
.../contrib/duckdb_offline_store/__init__.py | 0 .../contrib/duckdb_offline_store/duckdb.py | 17 + .../contrib/duckdb_repo_configuration.py | 19 + .../contrib/ibis_offline_store/__init__.py | 0 .../contrib/ibis_offline_store/ibis.py | 407 ++++++++++++++++++ sdk/python/feast/repo_config.py | 1 + setup.py | 7 +- 8 files changed, 463 insertions(+), 1 deletion(-) create mode 100644 sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py create mode 100644 sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py create mode 100644 sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py create mode 100644 sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py create mode 100644 sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py diff --git a/Makefile b/Makefile index 1598664f83..1a1d3b6e92 100644 --- a/Makefile +++ b/Makefile @@ -186,6 +186,19 @@ test-python-universal-athena: not test_snowflake" \ sdk/python/tests +test-python-universal-duckdb: + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.duckdb_repo_configuration \ + FEAST_USAGE=False IS_TEST=True \ + python -m pytest -n 8 --integration \ + -k "not test_nullable_online_store and \ + not gcs_registry and \ + not s3_registry and \ + not test_snowflake and \ + not bigquery and \ + not test_spark_materialization_consistency" \ + sdk/python/tests + test-python-universal-postgres-offline: PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.postgres_repo_configuration \ diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py b/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py new file mode 100644 index 0000000000..f927f2ff92 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py @@ -0,0 +1,17 @@ +import ibis +from pydantic import StrictStr + +from feast.infra.offline_stores.contrib.ibis_offline_store.ibis import IbisOfflineStore +from feast.repo_config import FeastConfigBaseModel + + +class DuckDBOfflineStoreConfig(FeastConfigBaseModel): + type: StrictStr = "duckdb" + # """ Offline store type selector""" + + +class DuckDBOfflineStore(IbisOfflineStore): + @staticmethod + def setup_ibis_backend(): + # there's no need to call setup as duckdb is default ibis backend + ibis.set_backend("duckdb") diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py new file mode 100644 index 0000000000..263ae97466 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py @@ -0,0 +1,19 @@ +from feast.infra.offline_stores.contrib.duckdb_offline_store.duckdb import ( + DuckDBOfflineStoreConfig, +) +from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 + FileDataSourceCreator, +) + + +class DuckDBDataSourceCreator(FileDataSourceCreator): + def create_offline_store_config(self): + self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() + return self.duckdb_offline_store_config + + +AVAILABLE_OFFLINE_STORES = [ + ("local", DuckDBDataSourceCreator), +] + 
+AVAILABLE_ONLINE_STORES = {"sqlite": ({"type": "sqlite"}, None)} diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py new file mode 100644 index 0000000000..72e0d970c6 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py @@ -0,0 +1,407 @@ +import os +import uuid +from datetime import datetime, timedelta +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import ibis +import ibis.selectors as s +import numpy as np +import pandas as pd +import pyarrow +from ibis.expr import datatypes as dt +from ibis.expr.types import Table +from pytz import utc + +from feast.data_source import DataSource +from feast.errors import SavedDatasetLocationAlreadyExists +from feast.feature_logging import LoggingConfig, LoggingSource +from feast.feature_view import FeatureView +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.file_source import ( + FileLoggingDestination, + FileSource, + SavedDatasetFileStorage, +) +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.infra.offline_stores.offline_utils import ( + get_pyarrow_schema_from_batch_source, +) +from feast.infra.registry.base_registry import BaseRegistry +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage + + +def _get_entity_schema(entity_df: pd.DataFrame) -> Dict[str, np.dtype]: + return dict(zip(entity_df.columns, entity_df.dtypes)) + + +class IbisOfflineStore(OfflineStore): + @staticmethod + def 
pull_latest_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + created_timestamp_column: Optional[str], + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + raise NotImplementedError() + + def _get_entity_df_event_timestamp_range( + entity_df: pd.DataFrame, entity_df_event_timestamp_col: str + ) -> Tuple[datetime, datetime]: + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), + ) + + return entity_df_event_timestamp_range + + @staticmethod + def _get_historical_features_one( + feature_view: FeatureView, + entity_table: Table, + feature_refs: List[str], + full_feature_names: bool, + timestamp_range: Tuple, + acc_table: Table, + event_timestamp_col: str, + ) -> Table: + fv_table: Table = ibis.read_parquet(feature_view.batch_source.name) + + for old_name, new_name in feature_view.batch_source.field_mapping.items(): + if old_name in fv_table.columns: + fv_table = fv_table.rename({new_name: old_name}) + + timestamp_field = feature_view.batch_source.timestamp_field + + # TODO mutate only if tz-naive + fv_table = fv_table.mutate( + **{ + timestamp_field: fv_table[timestamp_field].cast( + dt.Timestamp(timezone="UTC") + ) + } + ) + + full_name_prefix = feature_view.projection.name_alias or feature_view.name + + feature_refs = [ + fr.split(":")[1] + for fr in feature_refs + if fr.startswith(f"{full_name_prefix}:") + ] + + timestamp_range_start_minus_ttl = ( + timestamp_range[0] - feature_view.ttl + if feature_view.ttl and feature_view.ttl > timedelta(0, 0, 0, 0, 0, 0, 0) + else timestamp_range[0] + ) + + 
timestamp_range_start_minus_ttl = ibis.literal( + timestamp_range_start_minus_ttl.strftime("%Y-%m-%d %H:%M:%S.%f") + ).cast(dt.Timestamp(timezone="UTC")) + + timestamp_range_end = ibis.literal( + timestamp_range[1].strftime("%Y-%m-%d %H:%M:%S.%f") + ).cast(dt.Timestamp(timezone="UTC")) + + fv_table = fv_table.filter( + ibis.and_( + fv_table[timestamp_field] <= timestamp_range_end, + fv_table[timestamp_field] >= timestamp_range_start_minus_ttl, + ) + ) + + # join_key_map = feature_view.projection.join_key_map or {e.name: e.name for e in feature_view.entity_columns} + # predicates = [fv_table[k] == entity_table[v] for k, v in join_key_map.items()] + + if feature_view.projection.join_key_map: + predicates = [ + fv_table[k] == entity_table[v] + for k, v in feature_view.projection.join_key_map.items() + ] + else: + predicates = [ + fv_table[e.name] == entity_table[e.name] + for e in feature_view.entity_columns + ] + + predicates.append( + fv_table[timestamp_field] <= entity_table[event_timestamp_col] + ) + + fv_table = fv_table.inner_join( + entity_table, predicates, lname="", rname="{name}_y" + ) + + fv_table = ( + fv_table.group_by(by="entity_row_id") + .order_by(ibis.desc(fv_table[timestamp_field])) + .mutate(rn=ibis.row_number()) + ) + + fv_table = fv_table.filter(fv_table["rn"] == ibis.literal(0)) + + select_cols = ["entity_row_id"] + select_cols.extend(feature_refs) + fv_table = fv_table.select(select_cols) + + if full_feature_names: + fv_table = fv_table.rename( + {f"{full_name_prefix}__{feature}": feature for feature in feature_refs} + ) + + acc_table = acc_table.left_join( + fv_table, + predicates=[fv_table.entity_row_id == acc_table.entity_row_id], + lname="", + rname="{name}_yyyy", + ) + + acc_table = acc_table.drop(s.endswith("_yyyy")) + + return acc_table + + @staticmethod + def _to_utc(entity_df: pd.DataFrame, event_timestamp_col): + entity_df_event_timestamp = entity_df.loc[ + :, event_timestamp_col + ].infer_objects() + if 
pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + + entity_df[event_timestamp_col] = entity_df_event_timestamp + return entity_df + + @staticmethod + def _generate_row_id( + entity_table: Table, feature_views: List[FeatureView], event_timestamp_col + ) -> Table: + all_entities = [event_timestamp_col] + for fv in feature_views: + if fv.projection.join_key_map: + all_entities.extend(fv.projection.join_key_map.values()) + else: + all_entities.extend([e.name for e in fv.entity_columns]) + + r = ibis.literal("") + + for e in set(all_entities): + r = r.concat(entity_table[e].cast("string")) # type: ignore + + entity_table = entity_table.mutate(entity_row_id=r) + + return entity_table + + @staticmethod + def get_historical_features( + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pd.DataFrame, str], + registry: BaseRegistry, + project: str, + full_feature_names: bool = False, + ) -> RetrievalJob: + entity_schema = _get_entity_schema( + entity_df=entity_df, + ) + event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema=entity_schema, + ) + + timestamp_range = IbisOfflineStore._get_entity_df_event_timestamp_range( + entity_df, event_timestamp_col + ) + entity_df = IbisOfflineStore._to_utc(entity_df, event_timestamp_col) + + entity_table = ibis.memtable(entity_df) + entity_table = IbisOfflineStore._generate_row_id( + entity_table, feature_views, event_timestamp_col + ) + + res: Table = entity_table + + for fv in feature_views: + res = IbisOfflineStore._get_historical_features_one( + fv, + entity_table, + feature_refs, + full_feature_names, + timestamp_range, + res, + event_timestamp_col, + ) + + res = res.drop("entity_row_id") + + return IbisRetrievalJob( + res, + OnDemandFeatureView.get_requested_odfvs(feature_refs, project, registry), + full_feature_names, + metadata=RetrievalMetadata( + 
features=feature_refs, + keys=list(set(entity_df.columns) - {event_timestamp_col}), + min_event_timestamp=timestamp_range[0], + max_event_timestamp=timestamp_range[1], + ), + ) + + @staticmethod + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, FileSource) + + fields = join_key_columns + feature_name_columns + [timestamp_field] + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + + table = ibis.read_parquet(data_source.path) + + table = table.select(*fields) + + table = table.filter( + ibis.and_( + table[timestamp_field] >= ibis.literal(start_date), + table[timestamp_field] <= ibis.literal(end_date), + ) + ) + + return IbisRetrievalJob( + table=table, + on_demand_feature_views=[], + full_feature_names=False, + metadata=None, + ) + + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: BaseRegistry, + ): + destination = logging_config.destination + assert isinstance(destination, FileLoggingDestination) + + if isinstance(data, Path): + table = ibis.read_parquet(data) + else: + table = ibis.memtable(data) + + if destination.partition_by: + kwargs = {"partition_by": destination.partition_by} + else: + kwargs = {} + + table.to_parquet( + f"{destination.path}/{uuid.uuid4().hex}-{{i}}.parquet", **kwargs + ) + + @staticmethod + def offline_write_batch( + config: RepoConfig, + feature_view: FeatureView, + table: pyarrow.Table, + progress: Optional[Callable[[int], Any]], + ): + assert isinstance(feature_view.batch_source, FileSource) + + pa_schema, column_names = get_pyarrow_schema_from_batch_source( + config, feature_view.batch_source + ) + if column_names != table.column_names: + raise ValueError( + 
f"The input pyarrow table has schema {table.schema} with the incorrect columns {table.column_names}. " + f"The schema is expected to be {pa_schema} with the columns (in this exact order) to be {column_names}." + ) + + file_options = feature_view.batch_source.file_options + prev_table = ibis.read_parquet(file_options.uri).to_pyarrow() + if table.schema != prev_table.schema: + table = table.cast(prev_table.schema) + new_table = pyarrow.concat_tables([table, prev_table]) + + ibis.memtable(new_table).to_parquet(file_options.uri) + + +class IbisRetrievalJob(RetrievalJob): + def __init__( + self, table, on_demand_feature_views, full_feature_names, metadata + ) -> None: + super().__init__() + self.table = table + self._on_demand_feature_views: List[ + OnDemandFeatureView + ] = on_demand_feature_views + self._full_feature_names = full_feature_names + self._metadata = metadata + + def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: + return self.table.execute() + + def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: + return self.table.to_pyarrow() + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> List[OnDemandFeatureView]: + return self._on_demand_feature_views + + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: bool = False, + timeout: Optional[int] = None, + ): + assert isinstance(storage, SavedDatasetFileStorage) + if not allow_overwrite and os.path.exists(storage.file_options.uri): + raise SavedDatasetLocationAlreadyExists(location=storage.file_options.uri) + + filesystem, path = FileSource.create_filesystem_and_path( + storage.file_options.uri, + storage.file_options.s3_endpoint_override, + ) + + if path.endswith(".parquet"): + pyarrow.parquet.write_table( + self.to_arrow(), where=path, filesystem=filesystem + ) + else: + # otherwise assume destination is directory + pyarrow.parquet.write_to_dataset( + 
self.to_arrow(), root_path=path, filesystem=filesystem + ) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index c69bb4d1e7..d500059c6b 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -74,6 +74,7 @@ "postgres": "feast.infra.offline_stores.contrib.postgres_offline_store.postgres.PostgreSQLOfflineStore", "athena": "feast.infra.offline_stores.contrib.athena_offline_store.athena.AthenaOfflineStore", "mssql": "feast.infra.offline_stores.contrib.mssql_offline_store.mssql.MsSqlServerOfflineStore", + "duckdb": "feast.infra.offline_stores.contrib.duckdb_offline_store.duckdb.DuckDBOfflineStore", } FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE = { diff --git a/setup.py b/setup.py index 484024b206..6d59fa0aa5 100644 --- a/setup.py +++ b/setup.py @@ -146,6 +146,10 @@ "ibis-substrait" ] +DUCKDB_REQUIRED = [ + "ibis-framework[duckdb]" +] + CI_REQUIRED = ( [ "build", @@ -372,7 +376,8 @@ def run(self): "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, "rockset": ROCKSET_REQUIRED, - "ibis": IBIS_REQUIRED + "ibis": IBIS_REQUIRED, + "duckdb": DUCKDB_REQUIRED }, include_package_data=True, license="Apache", From 7c908822f8d9f5e32ab17d96e6b5dd79e5b59b3e Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Wed, 6 Mar 2024 04:42:18 +0700 Subject: [PATCH 054/122] fix: remove not use input parameter in spark source (#3980) remove unused parameter when init sparksource Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- .../offline_stores/contrib/spark_offline_store/spark_source.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index a27065fb5e..1ff7e6de58 100644 --- 
a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -39,7 +39,6 @@ def __init__( query: Optional[str] = None, path: Optional[str] = None, file_format: Optional[str] = None, - event_timestamp_column: Optional[str] = None, created_timestamp_column: Optional[str] = None, field_mapping: Optional[Dict[str, str]] = None, description: Optional[str] = "", From 5e0af8f52832daec34edd19cbad5e20ac3fd74d0 Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Tue, 5 Mar 2024 16:43:23 -0500 Subject: [PATCH 055/122] fix: Added registryPath parameter documentation in WebUI reference (#3983) * fix: Added registryPath parameter documentation Added a note in Webui reference in regard to supported registryPath values Related to #3974 Signed-off-by: Theodor Mihalache * fix: Added registryPath parameter documentation Added a note in Webui reference in regard to supported registryPath values Related to #3974 Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- docs/reference/alpha-web-ui.md | 2 ++ ui/README.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/docs/reference/alpha-web-ui.md b/docs/reference/alpha-web-ui.md index 7d21a3d45d..398c8de0ae 100644 --- a/docs/reference/alpha-web-ui.md +++ b/docs/reference/alpha-web-ui.md @@ -85,6 +85,8 @@ When you start the React app, it will look for `project-list.json` to find a lis } ``` +* **Note** - `registryPath` only supports a file location or a url. + Then start the React App ```bash diff --git a/ui/README.md b/ui/README.md index a9ce5d3ec7..12aacd329e 100644 --- a/ui/README.md +++ b/ui/README.md @@ -61,6 +61,8 @@ When you start the React app, it will look for `projects-list.json` to find a li } ``` +* **Note** - `registryPath` only supports a file location or a url. 
+ ``` // Start the React App yarn start From 2cf1a0fa9efbceca2e79c5e375796696e248e3d9 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Wed, 6 Mar 2024 01:54:02 +0400 Subject: [PATCH 056/122] fix: Get container host addresses from testcontainers (#3946) * fix: get container host addresses from testcontainers Signed-off-by: tokoko * resolve trino container host with testcontainers Signed-off-by: tokoko --------- Signed-off-by: tokoko --- .../contrib/trino_offline_store/tests/data_source.py | 5 +++-- .../feature_repos/universal/online_store/redis.py | 6 +++++- sdk/python/tests/unit/test_sql_registry.py | 6 ++++-- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index fcc0c8d0fa..bd3f9def8f 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -62,10 +62,11 @@ def __init__( "must be include into pytest plugins" ) self.exposed_port = self.container.get_exposed_port("8080") + self.container_host = self.container.get_container_host_ip() self.client = Trino( user="user", catalog="memory", - host="localhost", + host=self.container_host, port=self.exposed_port, source="trino-python-client", http_scheme="http", @@ -123,7 +124,7 @@ def get_prefixed_table_name(self, suffix: str) -> str: def create_offline_store_config(self) -> FeastConfigBaseModel: return TrinoOfflineStoreConfig( - host="localhost", + host=self.container_host, port=self.exposed_port, catalog="memory", dataset=self.project_name, diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py index 11d62d9d30..8e18f7fb17 100644 --- 
a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py @@ -20,7 +20,11 @@ def create_online_store(self) -> Dict[str, str]: container=self.container, predicate=log_string_to_wait_for, timeout=10 ) exposed_port = self.container.get_exposed_port("6379") - return {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"} + container_host = self.container.get_container_host_ip() + return { + "type": "redis", + "connection_string": f"{container_host}:{exposed_port},db=0", + } def teardown(self): self.container.stop() diff --git a/sdk/python/tests/unit/test_sql_registry.py b/sdk/python/tests/unit/test_sql_registry.py index b96dc6fe77..722e318b0c 100644 --- a/sdk/python/tests/unit/test_sql_registry.py +++ b/sdk/python/tests/unit/test_sql_registry.py @@ -66,10 +66,11 @@ def pg_registry(): ) logger.info("Waited for %s seconds until postgres container was up", waited) container_port = container.get_exposed_port(5432) + container_host = container.get_container_host_ip() registry_config = RegistryConfig( registry_type="sql", - path=f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}", + path=f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", ) yield SqlRegistry(registry_config, "project", None) @@ -100,10 +101,11 @@ def mysql_registry(): ) logger.info("Waited for %s seconds until mysql container was up", waited) container_port = container.get_exposed_port(3306) + container_host = container.get_container_host_ip() registry_config = RegistryConfig( registry_type="sql", - path=f"mysql+pymysql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}", + path=f"mysql+pymysql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", ) yield SqlRegistry(registry_config, "project", None) From 
f604af9ebf56ebd88b4e6ef541fdc20de2cc5b8c Mon Sep 17 00:00:00 2001 From: Jeremy Ary Date: Wed, 6 Mar 2024 08:20:27 -0600 Subject: [PATCH 057/122] fix: Swap security label check on the PR title validation job to explicit permissions instead (#3987) revert security label check for PR title validation & add explicit read-only permission instead Signed-off-by: Jeremy Ary --- .github/workflows/lint_pr.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml index 12f7182ce8..d1aa7d16a3 100644 --- a/.github/workflows/lint_pr.yml +++ b/.github/workflows/lint_pr.yml @@ -7,12 +7,13 @@ on: - edited - synchronize +permissions: + # read-only perms specified due to use of pull_request_target in lieu of security label check + pull-requests: read + jobs: validate-title: - # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: - ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || - (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && github.repository == 'feast-dev/feast' name: Validate PR title runs-on: ubuntu-latest From 43b2c287705c2a3e882517524229f155c9ce0a01 Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Thu, 7 Mar 2024 00:20:46 +0700 Subject: [PATCH 058/122] feat: Add Entity df in format of a Spark Dataframe instead of just pd.DataFrame or string for SparkOfflineStore (#3988) * remove unused parameter when init sparksource Signed-off-by: tanlocnguyen * feat: add entity df to SparkOfflineStore when get_historical_features Signed-off-by: tanlocnguyen * fix: lint error Signed-off-by: 
tanlocnguyen --------- Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- .../contrib/spark_offline_store/spark.py | 27 ++++++++++++------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index c9591b7c3f..b1b1c04c7d 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -125,7 +125,7 @@ def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], feature_refs: List[str], - entity_df: Union[pandas.DataFrame, str], + entity_df: Union[pandas.DataFrame, str, pyspark.sql.DataFrame], registry: Registry, project: str, full_feature_names: bool = False, @@ -473,15 +473,16 @@ def _get_entity_df_event_timestamp_range( entity_df_event_timestamp.min().to_pydatetime(), entity_df_event_timestamp.max().to_pydatetime(), ) - elif isinstance(entity_df, str): + elif isinstance(entity_df, str) or isinstance(entity_df, pyspark.sql.DataFrame): # If the entity_df is a string (SQL query), determine range # from table - df = spark_session.sql(entity_df).select(entity_df_event_timestamp_col) - - # Checks if executing entity sql resulted in any data - if df.rdd.isEmpty(): - raise EntitySQLEmptyResults(entity_df) - + if isinstance(entity_df, str): + df = spark_session.sql(entity_df).select(entity_df_event_timestamp_col) + # Checks if executing entity sql resulted in any data + if df.rdd.isEmpty(): + raise EntitySQLEmptyResults(entity_df) + else: + df = entity_df # TODO(kzhang132): need utc conversion here. 
entity_df_event_timestamp_range = ( @@ -499,8 +500,11 @@ def _get_entity_schema( ) -> Dict[str, np.dtype]: if isinstance(entity_df, pd.DataFrame): return dict(zip(entity_df.columns, entity_df.dtypes)) - elif isinstance(entity_df, str): - entity_spark_df = spark_session.sql(entity_df) + elif isinstance(entity_df, str) or isinstance(entity_df, pyspark.sql.DataFrame): + if isinstance(entity_df, str): + entity_spark_df = spark_session.sql(entity_df) + else: + entity_spark_df = entity_df return dict( zip( entity_spark_df.columns, @@ -526,6 +530,9 @@ def _upload_entity_df( elif isinstance(entity_df, str): spark_session.sql(entity_df).createOrReplaceTempView(table_name) return + elif isinstance(entity_df, pyspark.sql.DataFrame): + entity_df.createOrReplaceTempView(table_name) + return else: raise InvalidEntityType(type(entity_df)) From 60f24f9ed16a216acb0f3642892dea73690ca29f Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Wed, 6 Mar 2024 13:28:54 -0500 Subject: [PATCH 059/122] feat: Dropping unit tests for Python 3.8 (#3989) feat: dropping unit tests for Python 3.8 Update unit_tests.yml to no longer run for Python 3.8 Signed-off-by: franciscojavierarceo --- .github/workflows/unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 31e6d08c74..7e2e3b577a 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -7,7 +7,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8", "3.9", "3.10" ] + python-version: [ "3.9", "3.10" ] os: [ ubuntu-latest, macOS-latest ] exclude: - os: macOS-latest From f93c5fd4b8bd0031942c4f6ba4e84ebc54be8522 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Wed, 6 Mar 2024 16:45:39 -0500 Subject: [PATCH 060/122] fix: Move gRPC dependencies to an extra (#3900) --- .../docker-compose/feast10/entrypoint.sh | 4 ++-- setup.py | 15 ++++++++++----- 2 files changed, 12 insertions(+), 7 deletions(-) diff 
--git a/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh b/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh index d7dcd03c5f..0690b734c3 100755 --- a/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh +++ b/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh @@ -4,8 +4,8 @@ set -e # feast root directory is expected to be mounted (eg, by docker compose) cd /mnt/feast -pip install -e '.[redis]' +pip install -e '.[grpcio,redis]' cd /app python materialize.py -feast serve_transformations --port 8080 \ No newline at end of file +feast serve_transformations --port 8080 diff --git a/setup.py b/setup.py index 6d59fa0aa5..b601c90146 100644 --- a/setup.py +++ b/setup.py @@ -44,10 +44,6 @@ "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill~=0.3.0", - "grpcio>=1.56.2,<2", - "grpcio-tools>=1.56.2,<2", - "grpcio-reflection>=1.56.2,<2", - "grpcio-health-checking>=1.56.2,<2", "mypy-protobuf==3.1", "Jinja2>=2,<4", "jsonschema", @@ -143,7 +139,14 @@ IBIS_REQUIRED = [ "ibis-framework", - "ibis-substrait" + "ibis-substrait", +] + +GRPCIO_REQUIRED = [ + "grpcio>=1.56.2,<2", + "grpcio-tools>=1.56.2,<2", + "grpcio-reflection>=1.56.2,<2", + "grpcio-health-checking>=1.56.2,<2", ] DUCKDB_REQUIRED = [ @@ -209,6 +212,7 @@ + ROCKSET_REQUIRED + HAZELCAST_REQUIRED + IBIS_REQUIRED + + GRPCIO_REQUIRED ) @@ -375,6 +379,7 @@ def run(self): "docs": DOCS_REQUIRED, "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, + "grpcio": GRPCIO_REQUIRED, "rockset": ROCKSET_REQUIRED, "ibis": IBIS_REQUIRED, "duckdb": DUCKDB_REQUIRED From 0e036f86738cb7085630817394ba9e6c8cfbf8c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 21:08:51 -0500 Subject: [PATCH 061/122] chore: Bump ip from 1.1.5 to 1.1.9 in /ui (#3959) --- ui/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 
becb6bbd7b..5c9e5c17ac 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -6461,9 +6461,9 @@ invariant@^2.2.4: loose-envify "^1.0.0" ip@^1.1.0: - version "1.1.5" - resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" - integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= + version "1.1.9" + resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.9.tgz#8dfbcc99a754d07f425310b86a99546b1151e396" + integrity sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ== ipaddr.js@1.9.1: version "1.9.1" From 158a240b36593cc2fd5b60bf1d3bb0bc1b847b93 Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Thu, 7 Mar 2024 19:43:32 +0700 Subject: [PATCH 062/122] chore: Update readme spark.md (#3992) update readme spark.md Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- docs/reference/offline-stores/spark.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/offline-stores/spark.md b/docs/reference/offline-stores/spark.md index ae5ea78071..3cca2aab1a 100644 --- a/docs/reference/offline-stores/spark.md +++ b/docs/reference/offline-stores/spark.md @@ -4,7 +4,7 @@ The Spark offline store provides support for reading [SparkSources](../data-sources/spark.md). -* Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. A Pandas dataframes will be converted to a Spark dataframe and processed as a temporary view. +* Entity dataframes can be provided as a SQL query, Pandas dataframe or can be provided as a Pyspark dataframe. A Pandas dataframes will be converted to a Spark dataframe and processed as a temporary view. 
## Disclaimer From 817995c12588cc35c53d1ad487efaaf53da287be Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Thu, 7 Mar 2024 07:53:20 -0500 Subject: [PATCH 063/122] feat: Dropping Python 3.8 from local integration tests and integration tests (#3994) --- .github/workflows/pr_integration_tests.yml | 4 ++-- .github/workflows/pr_local_integration_tests.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index ba4169c292..2b0e6a1056 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -86,7 +86,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8", "3.10" ] + python-version: [ "3.10" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} @@ -167,4 +167,4 @@ jobs: SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread \ No newline at end of file + run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 668bcb5e50..17ff54b1f8 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -19,7 +19,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8", "3.10" ] + python-version: [ "3.10" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} From fa8492dfe7f38ab493a8d35a412ec9334a0ff6b9 Mon Sep 17 00:00:00 2001 From: Alex Sasnouskikh Date: Thu, 7 Mar 2024 16:43:23 +0100 Subject: [PATCH 064/122] fix: Handle ComplexFeastType to None comparison (#3876) Signed-off-by: 
Aliaksandr Sasnouskikh --- sdk/python/feast/types.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/types.py b/sdk/python/feast/types.py index 0ba1725f17..4b07c58d19 100644 --- a/sdk/python/feast/types.py +++ b/sdk/python/feast/types.py @@ -50,7 +50,10 @@ def __hash__(self): return hash(self.to_value_type().value) def __eq__(self, other): - return self.to_value_type() == other.to_value_type() + if isinstance(other, ComplexFeastType): + return self.to_value_type() == other.to_value_type() + else: + return False class PrimitiveFeastType(Enum): From 42a7b8170d6dc994055c67989046d11c238af40f Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Thu, 7 Mar 2024 20:27:32 +0400 Subject: [PATCH 065/122] feat: Add python client for remote registry server (#3941) * add remote registry Signed-off-by: tokoko * format and lint remote registry code Signed-off-by: tokoko * add read-only registry exception Signed-off-by: tokoko --------- Signed-off-by: tokoko --- sdk/python/feast/errors.py | 5 + sdk/python/feast/feature_store.py | 4 + .../feast/infra/registry/base_registry.py | 2 +- sdk/python/feast/infra/registry/registry.py | 4 + sdk/python/feast/infra/registry/remote.py | 370 ++++++++++++++++++ sdk/python/feast/repo_config.py | 1 + .../tests/unit/infra/registry/test_remote.py | 69 ++++ 7 files changed, 454 insertions(+), 1 deletion(-) create mode 100644 sdk/python/feast/infra/registry/remote.py create mode 100644 sdk/python/tests/unit/infra/registry/test_remote.py diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 9097e40c94..b7151ff0c8 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -415,3 +415,8 @@ def __init__(self): class PushSourceNotFoundException(Exception): def __init__(self, push_source_name: str): super().__init__(f"Unable to find push source '{push_source_name}'.") + + +class ReadOnlyRegistryException(Exception): + def __init__(self): + super().__init__("Registry 
implementation is read-only.") diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 4a53672b2e..e38120c33d 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -164,6 +164,10 @@ def __init__( self._registry = SnowflakeRegistry( registry_config, self.config.project, None ) + elif registry_config and registry_config.registry_type == "remote": + from feast.infra.registry.remote import RemoteRegistry + + self._registry = RemoteRegistry(registry_config, self.config.project, None) else: r = Registry(self.config.project, registry_config, repo_path=self.repo_path) r._initialize_registry(self.config.project) diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index d0ab74812e..9ee3bbbabc 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -246,7 +246,7 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): @abstractmethod def get_stream_feature_view( self, name: str, project: str, allow_cache: bool = False - ): + ) -> StreamFeatureView: """ Retrieves a stream feature view. 
diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index fc7be75e0d..a9d6c44f38 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -178,6 +178,10 @@ def __new__( from feast.infra.registry.snowflake import SnowflakeRegistry return SnowflakeRegistry(registry_config, project, repo_path) + elif registry_config and registry_config.registry_type == "remote": + from feast.infra.registry.remote import RemoteRegistry + + return RemoteRegistry(registry_config, project, repo_path) else: return super(Registry, cls).__new__(cls) diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py new file mode 100644 index 0000000000..67d61ffec7 --- /dev/null +++ b/sdk/python/feast/infra/registry/remote.py @@ -0,0 +1,370 @@ +from datetime import datetime +from pathlib import Path +from typing import List, Optional, Union + +import grpc +from google.protobuf.empty_pb2 import Empty +from pydantic import StrictStr + +from feast.base_feature_view import BaseFeatureView +from feast.data_source import DataSource +from feast.entity import Entity +from feast.errors import ReadOnlyRegistryException +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.infra.infra_object import Infra +from feast.infra.registry.base_registry import BaseRegistry +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.project_metadata import ProjectMetadata +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc +from feast.repo_config import RegistryConfig +from feast.request_feature_view import RequestFeatureView +from feast.saved_dataset import SavedDataset, ValidationReference +from feast.stream_feature_view import StreamFeatureView + + +class RemoteRegistryConfig(RegistryConfig): + 
registry_type: StrictStr = "remote" + """ str: Provider name or a class name that implements Registry.""" + + path: StrictStr = "" + """ str: Path to metadata store. + If registry_type is 'remote', then this is a URL for registry server """ + + +class RemoteRegistry(BaseRegistry): + def __init__( + self, + registry_config: Union[RegistryConfig, RemoteRegistryConfig], + project: str, + repo_path: Optional[Path], + ): + self.channel = grpc.insecure_channel(registry_config.path) + self.stub = RegistryServer_pb2_grpc.RegistryServerStub(self.channel) + + def apply_entity(self, entity: Entity, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def delete_entity(self, name: str, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: + request = RegistryServer_pb2.GetEntityRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetEntity(request) + + return Entity.from_proto(response) + + def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]: + request = RegistryServer_pb2.ListEntitiesRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListEntities(request) + + return [Entity.from_proto(entity) for entity in response.entities] + + def apply_data_source( + self, data_source: DataSource, project: str, commit: bool = True + ): + raise ReadOnlyRegistryException() + + def delete_data_source(self, name: str, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_data_source( + self, name: str, project: str, allow_cache: bool = False + ) -> DataSource: + request = RegistryServer_pb2.GetDataSourceRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetDataSource(request) + + return DataSource.from_proto(response) + + def list_data_sources( + self, project: str, allow_cache: bool = False + ) -> 
List[DataSource]: + request = RegistryServer_pb2.ListDataSourcesRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListDataSources(request) + + return [ + DataSource.from_proto(data_source) for data_source in response.data_sources + ] + + def apply_feature_service( + self, feature_service: FeatureService, project: str, commit: bool = True + ): + raise ReadOnlyRegistryException() + + def delete_feature_service(self, name: str, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_feature_service( + self, name: str, project: str, allow_cache: bool = False + ) -> FeatureService: + request = RegistryServer_pb2.GetFeatureServiceRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetFeatureService(request) + + return FeatureService.from_proto(response) + + def list_feature_services( + self, project: str, allow_cache: bool = False + ) -> List[FeatureService]: + request = RegistryServer_pb2.ListFeatureServicesRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListFeatureServices(request) + + return [ + FeatureService.from_proto(feature_service) + for feature_service in response.feature_services + ] + + def apply_feature_view( + self, feature_view: BaseFeatureView, project: str, commit: bool = True + ): + raise ReadOnlyRegistryException() + + def delete_feature_view(self, name: str, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_stream_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> StreamFeatureView: + request = RegistryServer_pb2.GetStreamFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetStreamFeatureView(request) + + return StreamFeatureView.from_proto(response) + + def list_stream_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[StreamFeatureView]: + request = 
RegistryServer_pb2.ListStreamFeatureViewsRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListStreamFeatureViews(request) + + return [ + StreamFeatureView.from_proto(stream_feature_view) + for stream_feature_view in response.stream_feature_views + ] + + def get_on_demand_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> OnDemandFeatureView: + request = RegistryServer_pb2.GetOnDemandFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetOnDemandFeatureView(request) + + return OnDemandFeatureView.from_proto(response) + + def list_on_demand_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[OnDemandFeatureView]: + request = RegistryServer_pb2.ListOnDemandFeatureViewsRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListOnDemandFeatureViews(request) + + return [ + OnDemandFeatureView.from_proto(on_demand_feature_view) + for on_demand_feature_view in response.on_demand_feature_views + ] + + def get_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> FeatureView: + request = RegistryServer_pb2.GetFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetFeatureView(request) + + return FeatureView.from_proto(response) + + def list_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[FeatureView]: + request = RegistryServer_pb2.ListFeatureViewsRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListFeatureViews(request) + + return [ + FeatureView.from_proto(feature_view) + for feature_view in response.feature_views + ] + + def get_request_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> RequestFeatureView: + request = RegistryServer_pb2.GetRequestFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = 
self.stub.GetRequestFeatureView(request) + + return RequestFeatureView.from_proto(response) + + def list_request_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[RequestFeatureView]: + request = RegistryServer_pb2.ListRequestFeatureViewsRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListRequestFeatureViews(request) + + return [ + RequestFeatureView.from_proto(request_feature_view) + for request_feature_view in response.request_feature_views + ] + + def apply_materialization( + self, + feature_view: FeatureView, + project: str, + start_date: datetime, + end_date: datetime, + commit: bool = True, + ): + raise ReadOnlyRegistryException() + + def apply_saved_dataset( + self, + saved_dataset: SavedDataset, + project: str, + commit: bool = True, + ): + raise ReadOnlyRegistryException() + + def delete_saved_dataset(self, name: str, project: str, allow_cache: bool = False): + raise ReadOnlyRegistryException() + + def get_saved_dataset( + self, name: str, project: str, allow_cache: bool = False + ) -> SavedDataset: + request = RegistryServer_pb2.GetSavedDatasetRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetSavedDataset(request) + + return SavedDataset.from_proto(response) + + def list_saved_datasets( + self, project: str, allow_cache: bool = False + ) -> List[SavedDataset]: + request = RegistryServer_pb2.ListSavedDatasetsRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListSavedDatasets(request) + + return [ + SavedDataset.from_proto(saved_dataset) + for saved_dataset in response.saved_datasets + ] + + def apply_validation_reference( + self, + validation_reference: ValidationReference, + project: str, + commit: bool = True, + ): + raise ReadOnlyRegistryException() + + def delete_validation_reference(self, name: str, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_validation_reference( + self, name: 
str, project: str, allow_cache: bool = False + ) -> ValidationReference: + request = RegistryServer_pb2.GetValidationReferenceRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response = self.stub.GetValidationReference(request) + + return ValidationReference.from_proto(response) + + def list_validation_references( + self, project: str, allow_cache: bool = False + ) -> List[ValidationReference]: + request = RegistryServer_pb2.ListValidationReferencesRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListValidationReferences(request) + + return [ + ValidationReference.from_proto(validation_reference) + for validation_reference in response.validation_references + ] + + def list_project_metadata( + self, project: str, allow_cache: bool = False + ) -> List[ProjectMetadata]: + request = RegistryServer_pb2.ListProjectMetadataRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.ListProjectMetadata(request) + + return [ProjectMetadata.from_proto(pm) for pm in response.project_metadata] + + def update_infra(self, infra: Infra, project: str, commit: bool = True): + raise ReadOnlyRegistryException() + + def get_infra(self, project: str, allow_cache: bool = False) -> Infra: + request = RegistryServer_pb2.GetInfraRequest( + project=project, allow_cache=allow_cache + ) + + response = self.stub.GetInfra(request) + + return Infra.from_proto(response) + + def apply_user_metadata( + self, + project: str, + feature_view: BaseFeatureView, + metadata_bytes: Optional[bytes], + ): + pass + + def get_user_metadata( + self, project: str, feature_view: BaseFeatureView + ) -> Optional[bytes]: + pass + + def proto(self) -> RegistryProto: + return self.stub.Proto(Empty()) + + def commit(self): + raise ReadOnlyRegistryException() + + def refresh(self, project: Optional[str] = None): + request = RegistryServer_pb2.RefreshRequest(project=str(project)) + + self.stub.Refresh(request) diff --git 
a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index d500059c6b..263ba81e39 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -39,6 +39,7 @@ "file": "feast.infra.registry.registry.Registry", "sql": "feast.infra.registry.sql.SqlRegistry", "snowflake.registry": "feast.infra.registry.snowflake.SnowflakeRegistry", + "remote": "feast.infra.registry.remote.RemoteRegistry", } BATCH_ENGINE_CLASS_FOR_TYPE = { diff --git a/sdk/python/tests/unit/infra/registry/test_remote.py b/sdk/python/tests/unit/infra/registry/test_remote.py new file mode 100644 index 0000000000..16c6f0abfb --- /dev/null +++ b/sdk/python/tests/unit/infra/registry/test_remote.py @@ -0,0 +1,69 @@ +import assertpy +import grpc_testing +import pytest + +from feast import Entity, FeatureStore +from feast.infra.registry.remote import RemoteRegistry, RemoteRegistryConfig +from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc +from feast.registry_server import RegistryServer + + +class GrpcMockChannel: + def __init__(self, service, servicer): + self.service = service + self.test_server = grpc_testing.server_from_dictionary( + {service: servicer}, + grpc_testing.strict_real_time(), + ) + + def unary_unary( + self, method: str, request_serializer=None, response_deserializer=None + ): + method_name = method.split("/")[-1] + method_descriptor = self.service.methods_by_name[method_name] + + def handler(request): + rpc = self.test_server.invoke_unary_unary( + method_descriptor, (), request, None + ) + + response, trailing_metadata, code, details = rpc.termination() + return response + + return handler + + +@pytest.fixture +def mock_remote_registry(environment): + store: FeatureStore = environment.feature_store + registry = RemoteRegistry( + registry_config=RemoteRegistryConfig(path=""), project=None, repo_path=None + ) + mock_channel = GrpcMockChannel( + RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"], + 
RegistryServer(store=store), + ) + registry.stub = RegistryServer_pb2_grpc.RegistryServerStub(mock_channel) + return registry + + +def test_registry_server_get_entity(environment, mock_remote_registry): + store: FeatureStore = environment.feature_store + entity = Entity(name="driver", join_keys=["driver_id"]) + store.apply(entity) + + expected = store.get_entity(entity.name) + response_entity = mock_remote_registry.get_entity(entity.name, store.project) + + assertpy.assert_that(response_entity).is_equal_to(expected) + + +def test_registry_server_proto(environment, mock_remote_registry): + store: FeatureStore = environment.feature_store + entity = Entity(name="driver", join_keys=["driver_id"]) + store.apply(entity) + + expected = store.registry.proto() + response = mock_remote_registry.proto() + + assertpy.assert_that(response).is_equal_to(expected) From 21931d59f8a2f8b69383de0dd371a780149ccda8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gal=C3=A9n?= <105213101+galen-ft@users.noreply.github.com> Date: Sat, 9 Mar 2024 07:35:34 +0200 Subject: [PATCH 066/122] fix: Extend SQL registry config with a sqlalchemy_config_kwargs key (#3997) * Add SQLAlchemy Engine settings to registry config Signed-off-by: Galen * Allow for dict values of any-type Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> * Extend PostgreSQL and MySQL registry tests and add examples to the docs Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> * Add "echo=False" to registry docs Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> * Fix mypy type checker error Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> * Accept change by the Black code formatter Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> --------- Signed-off-by: Galen Signed-off-by: Galen Terziysky <105213101+galen-ft@users.noreply.github.com> --- docs/getting-started/concepts/registry.md | 3 +++ 
docs/tutorials/using-scalable-registry.md | 3 +++ sdk/python/feast/infra/registry/sql.py | 9 +++++++-- sdk/python/feast/repo_config.py | 3 +++ sdk/python/tests/unit/test_sql_registry.py | 2 ++ 5 files changed, 18 insertions(+), 2 deletions(-) diff --git a/docs/getting-started/concepts/registry.md b/docs/getting-started/concepts/registry.md index f7d4a5b3e1..8ac32ce87b 100644 --- a/docs/getting-started/concepts/registry.md +++ b/docs/getting-started/concepts/registry.md @@ -57,6 +57,9 @@ registry: registry_type: sql path: postgresql://postgres:mysecretpassword@127.0.0.1:55001/feast cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true ``` This supports any SQLAlchemy compatible database as a backend. The exact schema can be seen in [sql.py](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/registry/sql.py) diff --git a/docs/tutorials/using-scalable-registry.md b/docs/tutorials/using-scalable-registry.md index a87aedd9b9..30b8e01ed5 100644 --- a/docs/tutorials/using-scalable-registry.md +++ b/docs/tutorials/using-scalable-registry.md @@ -29,6 +29,9 @@ registry: registry_type: sql path: postgresql://postgres:mysecretpassword@127.0.0.1:55001/feast cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true ``` Specifically, the registry_type needs to be set to sql in the registry config block. On doing so, the path should refer to the [Database URL](https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls) for the database to be used, as expected by SQLAlchemy. No other additional commands are currently needed to configure this registry. 
diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index d57bcc7c0a..1e5c2a8725 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -4,7 +4,7 @@ from enum import Enum from pathlib import Path from threading import Lock -from typing import Any, Callable, List, Optional, Set, Union +from typing import Any, Callable, Dict, List, Optional, Set, Union from pydantic import StrictStr from sqlalchemy import ( # type: ignore @@ -190,6 +190,9 @@ class SqlRegistryConfig(RegistryConfig): """ str: Path to metadata store. If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """ + sqlalchemy_config_kwargs: Dict[str, Any] = {"echo": False} + """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. """ + class SqlRegistry(BaseRegistry): def __init__( @@ -199,7 +202,9 @@ def __init__( repo_path: Optional[Path], ): assert registry_config is not None, "SqlRegistry needs a valid registry_config" - self.engine: Engine = create_engine(registry_config.path, echo=False) + self.engine: Engine = create_engine( + registry_config.path, **registry_config.sqlalchemy_config_kwargs + ) metadata.create_all(self.engine) self.cached_registry_proto = self.proto() proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 263ba81e39..5708754622 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -126,6 +126,9 @@ class RegistryConfig(FeastBaseModel): s3_additional_kwargs: Optional[Dict[str, str]] = None """ Dict[str, str]: Extra arguments to pass to boto3 when writing the registry file to S3. """ + sqlalchemy_config_kwargs: Dict[str, Any] = {} + """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. """ + class RepoConfig(FeastBaseModel): """Repo config. 
Typically loaded from `feature_store.yaml`""" diff --git a/sdk/python/tests/unit/test_sql_registry.py b/sdk/python/tests/unit/test_sql_registry.py index 722e318b0c..4ca41423c1 100644 --- a/sdk/python/tests/unit/test_sql_registry.py +++ b/sdk/python/tests/unit/test_sql_registry.py @@ -71,6 +71,7 @@ def pg_registry(): registry_config = RegistryConfig( registry_type="sql", path=f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, ) yield SqlRegistry(registry_config, "project", None) @@ -106,6 +107,7 @@ def mysql_registry(): registry_config = RegistryConfig( registry_type="sql", path=f"mysql+pymysql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, ) yield SqlRegistry(registry_config, "project", None) From 924f9441107b8e36a3d5c6f8b16ed24f9a03b867 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Mon, 11 Mar 2024 11:08:11 +0400 Subject: [PATCH 067/122] feat: Refactor registry caching logic into a separate class (#3943) --- .../feast/infra/registry/caching_registry.py | 342 ++++++++++++++++++ sdk/python/feast/infra/registry/sql.py | 224 ++---------- 2 files changed, 369 insertions(+), 197 deletions(-) create mode 100644 sdk/python/feast/infra/registry/caching_registry.py diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py new file mode 100644 index 0000000000..4c408b0a46 --- /dev/null +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -0,0 +1,342 @@ +import logging +from abc import abstractmethod +from datetime import datetime, timedelta +from threading import Lock +from typing import List, Optional + +from feast import usage +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feature_service import FeatureService +from feast.feature_view 
import FeatureView +from feast.infra.infra_object import Infra +from feast.infra.registry import proto_registry_utils +from feast.infra.registry.base_registry import BaseRegistry +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.project_metadata import ProjectMetadata +from feast.request_feature_view import RequestFeatureView +from feast.saved_dataset import SavedDataset, ValidationReference +from feast.stream_feature_view import StreamFeatureView + +logger = logging.getLogger(__name__) + + +class CachingRegistry(BaseRegistry): + def __init__( + self, + project: str, + cache_ttl_seconds: int, + ): + self.cached_registry_proto = self.proto() + proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) + self.cached_registry_proto_created = datetime.utcnow() + self._refresh_lock = Lock() + self.cached_registry_proto_ttl = timedelta( + seconds=cache_ttl_seconds if cache_ttl_seconds is not None else 0 + ) + + @abstractmethod + def _get_data_source(self, name: str, project: str) -> DataSource: + pass + + def get_data_source( + self, name: str, project: str, allow_cache: bool = False + ) -> DataSource: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_data_source( + self.cached_registry_proto, name, project + ) + return self._get_data_source(name, project) + + @abstractmethod + def _list_data_sources(self, project: str) -> List[DataSource]: + pass + + def list_data_sources( + self, project: str, allow_cache: bool = False + ) -> List[DataSource]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_data_sources( + self.cached_registry_proto, project + ) + return self._list_data_sources(project) + + @abstractmethod + def _get_entity(self, name: str, project: str) -> Entity: + pass + + def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: + if allow_cache: + self._refresh_cached_registry_if_necessary() + 
return proto_registry_utils.get_entity( + self.cached_registry_proto, name, project + ) + return self._get_entity(name, project) + + @abstractmethod + def _list_entities(self, project: str) -> List[Entity]: + pass + + def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_entities( + self.cached_registry_proto, project + ) + return self._list_entities(project) + + @abstractmethod + def _get_feature_view(self, name: str, project: str) -> FeatureView: + pass + + def get_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> FeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_feature_view(name, project) + + @abstractmethod + def _list_feature_views(self, project: str) -> List[FeatureView]: + pass + + def list_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[FeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_feature_views( + self.cached_registry_proto, project + ) + return self._list_feature_views(project) + + @abstractmethod + def _get_on_demand_feature_view( + self, name: str, project: str + ) -> OnDemandFeatureView: + pass + + def get_on_demand_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> OnDemandFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_on_demand_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_on_demand_feature_view(name, project) + + @abstractmethod + def _list_on_demand_feature_views(self, project: str) -> List[OnDemandFeatureView]: + pass + + def list_on_demand_feature_views( + self, project: str, allow_cache: bool = False + ) -> 
List[OnDemandFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_on_demand_feature_views( + self.cached_registry_proto, project + ) + return self._list_on_demand_feature_views(project) + + @abstractmethod + def _get_request_feature_view(self, name: str, project: str) -> RequestFeatureView: + pass + + def get_request_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> RequestFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_request_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_request_feature_view(name, project) + + @abstractmethod + def _list_request_feature_views(self, project: str) -> List[RequestFeatureView]: + pass + + def list_request_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[RequestFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_request_feature_views( + self.cached_registry_proto, project + ) + return self._list_request_feature_views(project) + + @abstractmethod + def _get_stream_feature_view(self, name: str, project: str) -> StreamFeatureView: + pass + + def get_stream_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> StreamFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_stream_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_stream_feature_view(name, project) + + @abstractmethod + def _list_stream_feature_views(self, project: str) -> List[StreamFeatureView]: + pass + + def list_stream_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[StreamFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_stream_feature_views( + self.cached_registry_proto, project + ) + return 
self._list_stream_feature_views(project) + + @abstractmethod + def _get_feature_service(self, name: str, project: str) -> FeatureService: + pass + + def get_feature_service( + self, name: str, project: str, allow_cache: bool = False + ) -> FeatureService: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_feature_service( + self.cached_registry_proto, name, project + ) + return self._get_feature_service(name, project) + + @abstractmethod + def _list_feature_services(self, project: str) -> List[FeatureService]: + pass + + def list_feature_services( + self, project: str, allow_cache: bool = False + ) -> List[FeatureService]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_feature_services( + self.cached_registry_proto, project + ) + return self._list_feature_services(project) + + @abstractmethod + def _get_saved_dataset(self, name: str, project: str) -> SavedDataset: + pass + + def get_saved_dataset( + self, name: str, project: str, allow_cache: bool = False + ) -> SavedDataset: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_saved_dataset( + self.cached_registry_proto, name, project + ) + return self._get_saved_dataset(name, project) + + @abstractmethod + def _list_saved_datasets(self, project: str) -> List[SavedDataset]: + pass + + def list_saved_datasets( + self, project: str, allow_cache: bool = False + ) -> List[SavedDataset]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_saved_datasets( + self.cached_registry_proto, project + ) + return self._list_saved_datasets(project) + + @abstractmethod + def _get_validation_reference(self, name: str, project: str) -> ValidationReference: + pass + + def get_validation_reference( + self, name: str, project: str, allow_cache: bool = False + ) -> ValidationReference: + if allow_cache: + 
self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_validation_reference( + self.cached_registry_proto, name, project + ) + return self._get_validation_reference(name, project) + + @abstractmethod + def _list_validation_references(self, project: str) -> List[ValidationReference]: + pass + + def list_validation_references( + self, project: str, allow_cache: bool = False + ) -> List[ValidationReference]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_validation_references( + self.cached_registry_proto, project + ) + return self._list_validation_references(project) + + @abstractmethod + def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: + pass + + def list_project_metadata( + self, project: str, allow_cache: bool = False + ) -> List[ProjectMetadata]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_project_metadata( + self.cached_registry_proto, project + ) + return self._list_project_metadata(project) + + @abstractmethod + def _get_infra(self, project: str) -> Infra: + pass + + def get_infra(self, project: str, allow_cache: bool = False) -> Infra: + return self._get_infra(project) + + def refresh(self, project: Optional[str] = None): + if project: + project_metadata = proto_registry_utils.get_project_metadata( + registry_proto=self.cached_registry_proto, project=project + ) + if project_metadata: + usage.set_current_project_uuid(project_metadata.project_uuid) + else: + proto_registry_utils.init_project_metadata( + self.cached_registry_proto, project + ) + self.cached_registry_proto = self.proto() + self.cached_registry_proto_created = datetime.utcnow() + + def _refresh_cached_registry_if_necessary(self): + with self._refresh_lock: + expired = ( + self.cached_registry_proto is None + or self.cached_registry_proto_created is None + ) or ( + self.cached_registry_proto_ttl.total_seconds() + > 0 # 0 ttl means infinity + 
and ( + datetime.utcnow() + > ( + self.cached_registry_proto_created + + self.cached_registry_proto_ttl + ) + ) + ) + + if expired: + logger.info("Registry cache expired, so refreshing") + self.refresh() diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 1e5c2a8725..597c9b8513 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -1,9 +1,8 @@ import logging import uuid -from datetime import datetime, timedelta +from datetime import datetime from enum import Enum from pathlib import Path -from threading import Lock from typing import Any, Callable, Dict, List, Optional, Set, Union from pydantic import StrictStr @@ -37,8 +36,7 @@ from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.infra.infra_object import Infra -from feast.infra.registry import proto_registry_utils -from feast.infra.registry.base_registry import BaseRegistry +from feast.infra.registry.caching_registry import CachingRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto @@ -194,7 +192,7 @@ class SqlRegistryConfig(RegistryConfig): """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. 
""" -class SqlRegistry(BaseRegistry): +class SqlRegistry(CachingRegistry): def __init__( self, registry_config: Optional[Union[RegistryConfig, SqlRegistryConfig]], @@ -202,20 +200,14 @@ def __init__( repo_path: Optional[Path], ): assert registry_config is not None, "SqlRegistry needs a valid registry_config" + self.engine: Engine = create_engine( registry_config.path, **registry_config.sqlalchemy_config_kwargs ) metadata.create_all(self.engine) - self.cached_registry_proto = self.proto() - proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) - self.cached_registry_proto_created = datetime.utcnow() - self._refresh_lock = Lock() - self.cached_registry_proto_ttl = timedelta( - seconds=registry_config.cache_ttl_seconds - if registry_config.cache_ttl_seconds is not None - else 0 + super().__init__( + project=project, cache_ttl_seconds=registry_config.cache_ttl_seconds ) - self.project = project def teardown(self): for t in { @@ -232,49 +224,7 @@ def teardown(self): stmt = delete(t) conn.execute(stmt) - def refresh(self, project: Optional[str] = None): - if project: - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project - ) - if project_metadata: - usage.set_current_project_uuid(project_metadata.project_uuid) - else: - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) - self.cached_registry_proto = self.proto() - self.cached_registry_proto_created = datetime.utcnow() - - def _refresh_cached_registry_if_necessary(self): - with self._refresh_lock: - expired = ( - self.cached_registry_proto is None - or self.cached_registry_proto_created is None - ) or ( - self.cached_registry_proto_ttl.total_seconds() - > 0 # 0 ttl means infinity - and ( - datetime.utcnow() - > ( - self.cached_registry_proto_created - + self.cached_registry_proto_ttl - ) - ) - ) - - if expired: - logger.info("Registry cache expired, so refreshing") - self.refresh() - - def 
get_stream_feature_view( - self, name: str, project: str, allow_cache: bool = False - ): - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_stream_feature_view( - self.cached_registry_proto, name, project - ) + def _get_stream_feature_view(self, name: str, project: str): return self._get_object( table=stream_feature_views, name=name, @@ -286,14 +236,7 @@ def get_stream_feature_view( not_found_exception=FeatureViewNotFoundException, ) - def list_stream_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[StreamFeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_stream_feature_views( - self.cached_registry_proto, project - ) + def _list_stream_feature_views(self, project: str) -> List[StreamFeatureView]: return self._list_objects( stream_feature_views, project, @@ -311,12 +254,7 @@ def apply_entity(self, entity: Entity, project: str, commit: bool = True): proto_field_name="entity_proto", ) - def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_entity( - self.cached_registry_proto, name, project - ) + def _get_entity(self, name: str, project: str) -> Entity: return self._get_object( table=entities, name=name, @@ -328,14 +266,7 @@ def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Enti not_found_exception=EntityNotFoundException, ) - def get_feature_view( - self, name: str, project: str, allow_cache: bool = False - ) -> FeatureView: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_feature_view( - self.cached_registry_proto, name, project - ) + def _get_feature_view(self, name: str, project: str) -> FeatureView: return self._get_object( table=feature_views, name=name, @@ -347,14 +278,9 @@ def get_feature_view( 
not_found_exception=FeatureViewNotFoundException, ) - def get_on_demand_feature_view( - self, name: str, project: str, allow_cache: bool = False + def _get_on_demand_feature_view( + self, name: str, project: str ) -> OnDemandFeatureView: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_on_demand_feature_view( - self.cached_registry_proto, name, project - ) return self._get_object( table=on_demand_feature_views, name=name, @@ -366,14 +292,7 @@ def get_on_demand_feature_view( not_found_exception=FeatureViewNotFoundException, ) - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ): - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_request_feature_view( - self.cached_registry_proto, name, project - ) + def _get_request_feature_view(self, name: str, project: str): return self._get_object( table=request_feature_views, name=name, @@ -385,14 +304,7 @@ def get_request_feature_view( not_found_exception=FeatureViewNotFoundException, ) - def get_feature_service( - self, name: str, project: str, allow_cache: bool = False - ) -> FeatureService: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_feature_service( - self.cached_registry_proto, name, project - ) + def _get_feature_service(self, name: str, project: str) -> FeatureService: return self._get_object( table=feature_services, name=name, @@ -404,14 +316,7 @@ def get_feature_service( not_found_exception=FeatureServiceNotFoundException, ) - def get_saved_dataset( - self, name: str, project: str, allow_cache: bool = False - ) -> SavedDataset: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_saved_dataset( - self.cached_registry_proto, name, project - ) + def _get_saved_dataset(self, name: str, project: str) -> SavedDataset: return self._get_object( table=saved_datasets, name=name, @@ -423,14 
+328,7 @@ def get_saved_dataset( not_found_exception=SavedDatasetNotFound, ) - def get_validation_reference( - self, name: str, project: str, allow_cache: bool = False - ) -> ValidationReference: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_validation_reference( - self.cached_registry_proto, name, project - ) + def _get_validation_reference(self, name: str, project: str) -> ValidationReference: return self._get_object( table=validation_references, name=name, @@ -442,14 +340,7 @@ def get_validation_reference( not_found_exception=ValidationReferenceNotFound, ) - def list_validation_references( - self, project: str, allow_cache: bool = False - ) -> List[ValidationReference]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_validation_references( - self.cached_registry_proto, project - ) + def _list_validation_references(self, project: str) -> List[ValidationReference]: return self._list_objects( table=validation_references, project=project, @@ -458,12 +349,7 @@ def list_validation_references( proto_field_name="validation_reference_proto", ) - def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_entities( - self.cached_registry_proto, project - ) + def _list_entities(self, project: str) -> List[Entity]: return self._list_objects( entities, project, EntityProto, Entity, "entity_proto" ) @@ -496,14 +382,7 @@ def delete_feature_service(self, name: str, project: str, commit: bool = True): FeatureServiceNotFoundException, ) - def get_data_source( - self, name: str, project: str, allow_cache: bool = False - ) -> DataSource: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_data_source( - self.cached_registry_proto, name, project - ) + def _get_data_source(self, name: str, project: str) -> 
DataSource: return self._get_object( table=data_sources, name=name, @@ -515,14 +394,7 @@ def get_data_source( not_found_exception=DataSourceObjectNotFoundException, ) - def list_data_sources( - self, project: str, allow_cache: bool = False - ) -> List[DataSource]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_data_sources( - self.cached_registry_proto, project - ) + def _list_data_sources(self, project: str) -> List[DataSource]: return self._list_objects( data_sources, project, DataSourceProto, DataSource, "data_source_proto" ) @@ -564,14 +436,7 @@ def delete_data_source(self, name: str, project: str, commit: bool = True): if rows.rowcount < 1: raise DataSourceObjectNotFoundException(name, project) - def list_feature_services( - self, project: str, allow_cache: bool = False - ) -> List[FeatureService]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_feature_services( - self.cached_registry_proto, project - ) + def _list_feature_services(self, project: str) -> List[FeatureService]: return self._list_objects( feature_services, project, @@ -580,26 +445,12 @@ def list_feature_services( "feature_service_proto", ) - def list_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[FeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_feature_views( - self.cached_registry_proto, project - ) + def _list_feature_views(self, project: str) -> List[FeatureView]: return self._list_objects( feature_views, project, FeatureViewProto, FeatureView, "feature_view_proto" ) - def list_saved_datasets( - self, project: str, allow_cache: bool = False - ) -> List[SavedDataset]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_saved_datasets( - self.cached_registry_proto, project - ) + def _list_saved_datasets(self, project: str) -> List[SavedDataset]: return 
self._list_objects( saved_datasets, project, @@ -608,14 +459,7 @@ def list_saved_datasets( "saved_dataset_proto", ) - def list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_request_feature_views( - self.cached_registry_proto, project - ) + def _list_request_feature_views(self, project: str) -> List[RequestFeatureView]: return self._list_objects( request_feature_views, project, @@ -624,14 +468,7 @@ def list_request_feature_views( "feature_view_proto", ) - def list_on_demand_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[OnDemandFeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_on_demand_feature_views( - self.cached_registry_proto, project - ) + def _list_on_demand_feature_views(self, project: str) -> List[OnDemandFeatureView]: return self._list_objects( on_demand_feature_views, project, @@ -640,14 +477,7 @@ def list_on_demand_feature_views( "feature_view_proto", ) - def list_project_metadata( - self, project: str, allow_cache: bool = False - ) -> List[ProjectMetadata]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_project_metadata( - self.cached_registry_proto, project - ) + def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: with self.engine.connect() as conn: stmt = select(feast_metadata).where( feast_metadata.c.project_id == project, @@ -740,7 +570,7 @@ def update_infra(self, infra: Infra, project: str, commit: bool = True): name="infra_obj", ) - def get_infra(self, project: str, allow_cache: bool = False) -> Infra: + def _get_infra(self, project: str) -> Infra: infra_object = self._get_object( table=managed_infra, name="infra_obj", From 207693810fe714d5e03a25e3a3ca4286276e0393 Mon Sep 17 00:00:00 2001 From: Chester Date: Mon, 11 Mar 2024 
22:51:16 +0800 Subject: [PATCH 068/122] chore: Increase snowflake-connector-python dependency to 3.7 (#4008) * increase snowflake-connector-python to 3.7 Signed-off-by: Chester Ong * force_return_table to True Signed-off-by: Chester Ong --------- Signed-off-by: Chester Ong --- .../feast/infra/offline_stores/snowflake.py | 15 ++------------- setup.py | 2 +- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 14752fd857..cfaca038e7 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -470,20 +470,9 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: return df def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: - pa_table = execute_snowflake_statement( + return execute_snowflake_statement( self.snowflake_conn, self.to_sql() - ).fetch_arrow_all(force_return_table=False) - - if pa_table: - return pa_table - else: - empty_result = execute_snowflake_statement( - self.snowflake_conn, self.to_sql() - ) - - return pyarrow.Table.from_pandas( - pd.DataFrame(columns=[md.name for md in empty_result.description]) - ) + ).fetch_arrow_all(force_return_table=True) def to_sql(self) -> str: """ diff --git a/setup.py b/setup.py index b601c90146..0459cbd6eb 100644 --- a/setup.py +++ b/setup.py @@ -96,7 +96,7 @@ BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] SNOWFLAKE_REQUIRED = [ - "snowflake-connector-python[pandas]>=3,<4", + "snowflake-connector-python[pandas]>=3.7,<4", ] SPARK_REQUIRED = [ From ee4c4f1ca486facc14e13ad0dbe7c9cc7c82d832 Mon Sep 17 00:00:00 2001 From: Hao Xu Date: Mon, 11 Mar 2024 11:00:01 -0400 Subject: [PATCH 069/122] fix: Update actions/setup-python from v3 to v4 (#4003) --- .github/workflows/pr_integration_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 2b0e6a1056..b335c0f042 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -110,7 +110,7 @@ jobs: ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 id: setup-python with: python-version: ${{ matrix.python-version }} From 0441b8b9a7eae2eb478d12a8de911c1bd39ced37 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 12 Mar 2024 07:45:57 +0400 Subject: [PATCH 070/122] fix: CI unittest warnings (#4006) fix ci unittest warnings Signed-off-by: tokoko --- sdk/python/feast/driver_test_data.py | 4 ++-- sdk/python/feast/infra/offline_stores/file.py | 19 ++++++++++++++----- .../feast/infra/offline_stores/file_source.py | 10 +++++++++- .../feast/on_demand_pandas_transformation.py | 3 --- sdk/python/feast/utils.py | 13 ------------- .../infra/offline_stores/test_redshift.py | 1 + .../infra/offline_stores/test_snowflake.py | 1 + .../test_local_feature_store.py | 4 ++++ .../tests/unit/test_on_demand_feature_view.py | 2 ++ sdk/python/tests/unit/test_sql_registry.py | 11 +++++++++-- sdk/python/tests/utils/test_wrappers.py | 8 ++++---- 11 files changed, 46 insertions(+), 30 deletions(-) diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index 58c3e8db8f..7959046e6e 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -103,7 +103,7 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame "event_timestamp": [ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") for dt in pd.date_range( - start=start_date, end=end_date, freq="1H", inclusive="left" + start=start_date, end=end_date, freq="1h", inclusive="left" ) ] # include a fixed timestamp for get_historical_features in the quickstart @@ -209,7 +209,7 @@ def 
create_location_stats_df(locations, start_date, end_date) -> pd.DataFrame: "event_timestamp": [ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") for dt in pd.date_range( - start=start_date, end=end_date, freq="1H", inclusive="left" + start=start_date, end=end_date, freq="1h", inclusive="left" ) ] } diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 0b873a2091..d922e98c14 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime from pathlib import Path -from typing import Any, Callable, List, Literal, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union import dask import dask.dataframe as dd @@ -38,10 +38,7 @@ from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage from feast.usage import log_exceptions_and_usage -from feast.utils import ( - _get_requested_feature_views_to_features_dict, - _run_dask_field_mapping, -) +from feast.utils import _get_requested_feature_views_to_features_dict # FileRetrievalJob will cast string objects to string[pyarrow] from dask version 2023.7.1 # This is not the desired behavior for our use case, so we set the convert-string option to False @@ -512,6 +509,18 @@ def _read_datasource(data_source) -> dd.DataFrame: ) +def _run_dask_field_mapping( + table: dd.DataFrame, + field_mapping: Dict[str, str], +): + if field_mapping: + # run field mapping in the forward direction + table = table.rename(columns=field_mapping) + table = table.persist() + + return table + + def _field_mapping( df_to_join: dd.DataFrame, feature_view: FeatureView, diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index 887b410079..2672cf78bf 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ 
b/sdk/python/feast/infra/offline_stores/file_source.py @@ -1,5 +1,7 @@ from typing import Callable, Dict, Iterable, List, Optional, Tuple +import pyarrow +from packaging import version from pyarrow._fs import FileSystem from pyarrow._s3fs import S3FileSystem from pyarrow.parquet import ParquetDataset @@ -158,7 +160,13 @@ def get_table_column_names_and_types( # Adding support for different file format path # based on S3 filesystem if filesystem is None: - schema = ParquetDataset(path, use_legacy_dataset=False).schema + kwargs = ( + {"use_legacy_dataset": False} + if version.parse(pyarrow.__version__) < version.parse("15.0.0") + else {} + ) + + schema = ParquetDataset(path, **kwargs).schema if hasattr(schema, "names") and hasattr(schema, "types"): # Newer versions of pyarrow doesn't have this method, # but this field is good enough. diff --git a/sdk/python/feast/on_demand_pandas_transformation.py b/sdk/python/feast/on_demand_pandas_transformation.py index 52d45893c5..32cb44b429 100644 --- a/sdk/python/feast/on_demand_pandas_transformation.py +++ b/sdk/python/feast/on_demand_pandas_transformation.py @@ -30,9 +30,6 @@ def __eq__(self, other): "Comparisons should only involve OnDemandPandasTransformation class objects." 
) - if not super().__eq__(other): - return False - if ( self.udf_string != other.udf_string or self.udf.__code__.co_code != other.udf.__code__.co_code diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 50b1e73c86..70fbda964d 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -7,7 +7,6 @@ import pandas as pd import pyarrow -from dask import dataframe as dd from dateutil.tz import tzlocal from pytz import utc @@ -174,18 +173,6 @@ def _run_pyarrow_field_mapping( return table -def _run_dask_field_mapping( - table: dd.DataFrame, - field_mapping: Dict[str, str], -): - if field_mapping: - # run field mapping in the forward direction - table = table.rename(columns=field_mapping) - table = table.persist() - - return table - - def _coerce_datetime(ts): """ Depending on underlying time resolution, arrow to_pydict() sometimes returns pd diff --git a/sdk/python/tests/unit/infra/offline_stores/test_redshift.py b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py index 48ee99e89f..a9ed4c2b59 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_redshift.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py @@ -33,6 +33,7 @@ def test_offline_write_batch( s3_staging_location="s3://bucket/path", workgroup="", ), + entity_key_serialization_version=2, ) batch_source = RedshiftSource( diff --git a/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py index ac55f123bb..6e27cba341 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_snowflake.py @@ -38,6 +38,7 @@ def retrieval_job(request): provider="snowflake.offline", online_store=SqliteOnlineStoreConfig(type="sqlite"), offline_store=offline_store_config, + entity_key_serialization_version=2, ), full_feature_names=True, on_demand_feature_views=[], diff --git 
a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index 2cced75eb2..b3e6762c17 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -130,6 +130,7 @@ def test_apply_feature_view_with_inline_batch_source( driver_fv = FeatureView( name="driver_fv", entities=[entity], + schema=[Field(name="test_key", dtype=Int64)], source=file_source, ) @@ -178,6 +179,7 @@ def test_apply_feature_view_with_inline_stream_source( driver_fv = FeatureView( name="driver_fv", entities=[entity], + schema=[Field(name="test_key", dtype=Int64)], source=stream_source, ) @@ -332,6 +334,7 @@ def test_apply_conflicting_feature_view_names(feature_store_with_local_registry) driver_stats = FeatureView( name="driver_hourly_stats", entities=[driver], + schema=[Field(name="driver_id", dtype=Int64)], ttl=timedelta(seconds=10), online=False, source=FileSource(path="driver_stats.parquet"), @@ -341,6 +344,7 @@ def test_apply_conflicting_feature_view_names(feature_store_with_local_registry) customer_stats = FeatureView( name="DRIVER_HOURLY_STATS", entities=[customer], + schema=[Field(name="customer_id", dtype=Int64)], ttl=timedelta(seconds=10), online=False, source=FileSource(path="customer_stats.parquet"), diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index 721026ea46..66d02c65d1 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -13,6 +13,7 @@ # limitations under the License. 
import pandas as pd +import pytest from feast.feature_view import FeatureView from feast.field import Field @@ -38,6 +39,7 @@ def udf2(features_df: pd.DataFrame) -> pd.DataFrame: return df +@pytest.mark.filterwarnings("ignore:udf and udf_string parameters are deprecated") def test_hash(): file_source = FileSource(name="my-file-source", path="test.parquet") feature_view = FeatureView( diff --git a/sdk/python/tests/unit/test_sql_registry.py b/sdk/python/tests/unit/test_sql_registry.py index 4ca41423c1..094b8967c1 100644 --- a/sdk/python/tests/unit/test_sql_registry.py +++ b/sdk/python/tests/unit/test_sql_registry.py @@ -93,7 +93,7 @@ def mysql_registry(): container.start() # The log string uses '8.0.*' since the version might be changed as new Docker images are pushed. - log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. Version: '(\d+(\.\d+){1,2})' socket: '/var/run/mysqld/mysqld.sock' port: 3306" # noqa: W605 + log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. 
Version: '(\\d+(\\.\\d+){1,2})' socket: '/var/run/mysqld/mysqld.sock' port: 3306" # noqa: W605 waited = wait_for_logs( container=container, predicate=log_string_to_wait_for, @@ -218,6 +218,7 @@ def test_apply_feature_view_success(sql_registry): fv1 = FeatureView( name="my_feature_view_1", schema=[ + Field(name="test", dtype=Int64), Field(name="fs1_my_feature_1", dtype=Int64), Field(name="fs1_my_feature_2", dtype=String), Field(name="fs1_my_feature_3", dtype=Array(String)), @@ -313,6 +314,7 @@ def test_apply_on_demand_feature_view_success(sql_registry): entities=[driver()], ttl=timedelta(seconds=8640000000), schema=[ + Field(name="driver_id", dtype=Int64), Field(name="daily_miles_driven", dtype=Float32), Field(name="lat", dtype=Float32), Field(name="lon", dtype=Float32), @@ -403,7 +405,10 @@ def test_modify_feature_views_success(sql_registry): fv1 = FeatureView( name="my_feature_view_1", - schema=[Field(name="fs1_my_feature_1", dtype=Int64)], + schema=[ + Field(name="test", dtype=Int64), + Field(name="fs1_my_feature_1", dtype=Int64), + ], entities=[entity], tags={"team": "matchmaking"}, source=batch_source, @@ -527,6 +532,7 @@ def test_apply_data_source(sql_registry): fv1 = FeatureView( name="my_feature_view_1", schema=[ + Field(name="test", dtype=Int64), Field(name="fs1_my_feature_1", dtype=Int64), Field(name="fs1_my_feature_2", dtype=String), Field(name="fs1_my_feature_3", dtype=Array(String)), @@ -596,6 +602,7 @@ def test_registry_cache(sql_registry): fv1 = FeatureView( name="my_feature_view_1", schema=[ + Field(name="test", dtype=Int64), Field(name="fs1_my_feature_1", dtype=Int64), Field(name="fs1_my_feature_2", dtype=String), Field(name="fs1_my_feature_3", dtype=Array(String)), diff --git a/sdk/python/tests/utils/test_wrappers.py b/sdk/python/tests/utils/test_wrappers.py index efee675790..eb5e3ef3f1 100644 --- a/sdk/python/tests/utils/test_wrappers.py +++ b/sdk/python/tests/utils/test_wrappers.py @@ -1,14 +1,14 @@ -import pytest +import warnings def 
no_warnings(func): def wrapper_no_warnings(*args, **kwargs): - with pytest.warns(None) as warnings: + with warnings.catch_warnings(record=True) as record: func(*args, **kwargs) - if len(warnings) > 0: + if len(record) > 0: raise AssertionError( - "Warnings were raised: " + ", ".join([str(w) for w in warnings]) + "Warnings were raised: " + ", ".join([str(w) for w in record]) ) return wrapper_no_warnings From 5561b306d8c7b43851f5f411e1c4f4f34d99933f Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Tue, 12 Mar 2024 10:37:27 -0400 Subject: [PATCH 071/122] fix: Use CopyFrom() instead of __deepycopy__() for creating a copy of protobuf object. (#3999) * fix: use CopyFrom() instead of __deepycopy__() for creating a copy of protobuf object. Signed-off-by: Shuchu Han * fix: Use Infra instead of RegistryProto for the Infra proto. Signed-off-by: Shuchu Han * fix: Update the Python requirements after upgrade the version of Protobuf. Signed-off-by: Shuchu Han --------- Signed-off-by: Shuchu Han --- sdk/python/feast/feature_store.py | 4 +- .../requirements/py3.10-ci-requirements.txt | 125 ++++++++++-------- .../requirements/py3.10-requirements.txt | 52 +++----- .../requirements/py3.8-ci-requirements.txt | 104 +++++++-------- .../requirements/py3.8-requirements.txt | 46 +++---- .../requirements/py3.9-ci-requirements.txt | 123 +++++++++-------- .../requirements/py3.9-requirements.txt | 52 +++----- setup.py | 2 +- 8 files changed, 242 insertions(+), 266 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index e38120c33d..44236248fe 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -82,6 +82,7 @@ from feast.infra.registry.sql import SqlRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.online_response import OnlineResponse +from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, 
GetOnlineFeaturesResponse, @@ -745,7 +746,8 @@ def plan( # Compute the desired difference between the current infra, as stored in the registry, # and the desired infra. self._registry.refresh(project=self.project) - current_infra_proto = self._registry.proto().infra.__deepcopy__() + current_infra_proto = InfraProto() + current_infra_proto.CopyFrom(self._registry.proto().infra) desired_registry_proto = desired_repo_contents.to_registry_proto() new_infra = self._provider.plan_infra(self.config, desired_registry_proto) new_infra_proto = new_infra.to_proto() diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index e41706f403..54dc41fe5b 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -43,13 +43,13 @@ attrs==23.2.0 # referencing avro==1.10.0 # via feast (setup.py) -azure-core==1.30.0 +azure-core==1.30.1 # via # azure-identity # azure-storage-blob azure-identity==1.15.0 # via feast (setup.py) -azure-storage-blob==12.19.0 +azure-storage-blob==12.19.1 # via feast (setup.py) babel==2.14.0 # via @@ -63,18 +63,18 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.49 +boto3==1.34.59 # via # feast (setup.py) # moto -botocore==1.34.49 +botocore==1.34.59 # via # boto3 # moto # s3transfer bowler==0.9.0 # via feast (setup.py) -build==1.0.3 +build==1.1.1 # via # feast (setup.py) # pip-tools @@ -82,7 +82,7 @@ bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.14.0 # via firebase-admin -cachetools==5.3.2 +cachetools==5.3.3 # via google-auth cassandra-driver==3.29.0 # via feast (setup.py) @@ -127,8 +127,10 @@ comm==0.2.1 # ipykernel # ipywidgets coverage[toml]==7.4.3 - # via pytest-cov -cryptography==42.0.4 + # via + # coverage + # pytest-cov +cryptography==42.0.5 # via # azure-identity # azure-storage-blob @@ -177,7 +179,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data 
-fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fastjsonschema==2.19.1 # via nbformat @@ -213,9 +215,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.119.0 +google-api-python-client==2.121.0 # via firebase-admin -google-auth==2.28.1 +google-auth==2.28.2 # via # google-api-core # google-api-python-client @@ -226,7 +228,9 @@ google-auth==2.28.1 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via feast (setup.py) + # via + # feast (setup.py) + # google-cloud-bigquery google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigtable==2.23.0 @@ -242,7 +246,7 @@ google-cloud-datastore==2.19.0 # via feast (setup.py) google-cloud-firestore==2.15.0 # via firebase-admin -google-cloud-storage==2.14.0 +google-cloud-storage==2.15.0 # via # feast (setup.py) # firebase-admin @@ -260,13 +264,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.9 +great-expectations==0.18.10 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.62.0 +grpcio==1.62.1 # via # feast (setup.py) # google-api-core @@ -278,15 +282,15 @@ grpcio==1.62.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.0 +grpcio-health-checking==1.62.1 # via feast (setup.py) -grpcio-reflection==1.62.0 +grpcio-reflection==1.62.1 # via feast (setup.py) -grpcio-status==1.62.0 +grpcio-status==1.62.1 # via google-api-core -grpcio-testing==1.62.0 +grpcio-testing==1.62.1 # via feast (setup.py) -grpcio-tools==1.62.0 +grpcio-tools==1.62.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -333,13 +337,13 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via feast (setup.py) iniconfig==2.0.0 # via pytest 
-ipykernel==6.29.2 +ipykernel==6.29.3 # via jupyterlab -ipython==8.22.1 +ipython==8.22.2 # via # great-expectations # ipykernel @@ -369,7 +373,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.17 +json5==0.9.22 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -403,9 +407,9 @@ jupyter-core==5.7.1 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.2 +jupyter-lsp==2.2.4 # via jupyterlab -jupyter-server==2.12.5 +jupyter-server==2.13.0 # via # jupyter-lsp # jupyterlab @@ -414,7 +418,7 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.2 +jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -437,7 +441,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.20.2 +marshmallow==3.21.1 # via great-expectations matplotlib-inline==0.1.6 # via @@ -467,13 +471,13 @@ msal==1.27.0 # msal-extensions msal-extensions==1.1.0 # via azure-identity -msgpack==1.0.7 +msgpack==1.0.8 # via cachecontrol multipledispatch==1.0.0 # via ibis-framework multiprocess==0.70.16 # via bytewax -mypy==1.8.0 +mypy==1.9.0 # via # feast (setup.py) # sqlalchemy @@ -485,7 +489,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.1 +nbconvert==7.16.2 # via jupyter-server nbformat==5.9.2 # via @@ -497,7 +501,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.1.0 +notebook==7.1.1 # via great-expectations notebook-shim==0.2.4 # via @@ -517,7 +521,7 @@ oauthlib==3.2.2 # via requests-oauthlib overrides==7.7.0 # via jupyter-server -packaging==23.2 +packaging==24.0 # via # build # dask @@ -538,13 +542,14 @@ packaging==23.2 # pytest # snowflake-connector-python # sphinx -pandas==2.2.0 ; python_version >= "3.9" +pandas==2.2.1 # via # altair # db-dtypes # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # snowflake-connector-python pandocfilters==1.5.1 # via 
nbconvert @@ -560,7 +565,7 @@ pbr==6.0.0 # via mock pexpect==4.9.0 # via ipython -pip-tools==7.4.0 +pip-tools==7.4.1 # via feast (setup.py) platformdirs==3.11.0 # via @@ -588,7 +593,7 @@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) # google-api-core @@ -625,7 +630,7 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.0 +pyarrow==15.0.1 # via # db-dtypes # feast (setup.py) @@ -646,7 +651,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ -672,9 +677,9 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==24.0.0 +pyopenssl==24.1.0 # via snowflake-connector-python -pyparsing==3.1.1 +pyparsing==3.1.2 # via # great-expectations # httplib2 @@ -682,7 +687,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pyspark==3.5.0 +pyspark==3.5.1 # via feast (setup.py) pytest==7.4.4 # via @@ -708,7 +713,7 @@ pytest-timeout==1.4.2 # via feast (setup.py) pytest-xdist==3.5.0 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # arrow # botocore @@ -775,7 +780,7 @@ requests==2.31.0 # snowflake-connector-python # sphinx # trino -requests-oauthlib==1.3.1 +requests-oauthlib==1.4.0 # via kubernetes responses==0.25.0 # via moto @@ -787,7 +792,7 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.0 +rich==13.7.1 # via ibis-framework rockset==2.1.1 # via feast (setup.py) @@ -818,14 +823,16 @@ six==1.16.0 # python-dateutil # rfc3339-validator # thriftpy2 -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.7.1 - # via feast (setup.py) + # via + # feast (setup.py) + # snowflake-connector-python sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -844,7 +851,7 @@ 
sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ -856,7 +863,7 @@ stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi -substrait==0.12.1 +substrait==0.14.0 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -868,7 +875,7 @@ terminado==0.18.0 # jupyter-server-terminals testcontainers==3.7.1 # via feast (setup.py) -thriftpy2==0.4.17 +thriftpy2==0.4.20 # via happybase tinycss2==1.2.1 # via nbconvert @@ -884,7 +891,7 @@ tomli==2.0.1 # pip-tools # pyproject-hooks # pytest -tomlkit==0.12.3 +tomlkit==0.12.4 # via snowflake-connector-python toolz==0.12.1 # via @@ -929,27 +936,27 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==24.0.0.20240130 +types-pyopenssl==24.0.0.20240311 # via types-redis -types-python-dateutil==2.8.19.20240106 +types-python-dateutil==2.8.19.20240311 # via # arrow # feast (setup.py) types-pytz==2024.1.0.20240203 # via feast (setup.py) -types-pyyaml==6.0.12.12 +types-pyyaml==6.0.12.20240311 # via feast (setup.py) -types-redis==4.6.0.20240218 +types-redis==4.6.0.20240311 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.1.0.20240223 +types-setuptools==69.1.0.20240310 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # anyio # async-lru @@ -986,8 +993,10 @@ urllib3==1.26.18 # requests # responses # rockset -uvicorn[standard]==0.27.1 - # via feast (setup.py) +uvicorn[standard]==0.28.0 + # via + # feast (setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index b5dd9a78be..91b42f8271 100644 --- 
a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -44,7 +44,7 @@ dill==0.3.8 # via feast (setup.py) exceptiongroup==1.2.0 # via anyio -fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -52,18 +52,6 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.62.0 - # via - # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools -grpcio-health-checking==1.62.0 - # via feast (setup.py) -grpcio-reflection==1.62.0 - # via feast (setup.py) -grpcio-tools==1.62.0 - # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) h11==0.14.0 @@ -85,7 +73,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -101,7 +89,7 @@ mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.8.0 +mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -112,27 +100,24 @@ numpy==1.24.4 # feast (setup.py) # pandas # pyarrow -packaging==23.2 +packaging==24.0 # via # dask # gunicorn -pandas==2.2.0 +pandas==2.2.1 # via feast (setup.py) partd==1.4.1 # via dask proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==15.0.0 +pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ -140,7 +125,7 @@ pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn @@ -163,11 +148,11 @@ rpds-py==0.18.0 # referencing six==1.16.0 # via python-dateutil -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ 
-191,9 +176,9 @@ tqdm==4.66.2 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) -types-protobuf==4.24.0.20240129 +types-protobuf==4.24.0.20240311 # via mypy-protobuf -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # anyio # fastapi @@ -203,10 +188,14 @@ typing-extensions==4.9.0 # sqlalchemy2-stubs # typeguard # uvicorn +tzdata==2024.1 + # via pandas urllib3==2.2.1 # via requests -uvicorn[standard]==0.27.1 - # via feast (setup.py) +uvicorn[standard]==0.28.0 + # via + # feast (setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn volatile==2.1.0 @@ -217,6 +206,3 @@ websockets==12.0 # via uvicorn zipp==3.17.0 # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 339a6b1c49..18da48cc5c 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -43,13 +43,13 @@ attrs==23.2.0 # referencing avro==1.10.0 # via feast (setup.py) -azure-core==1.30.0 +azure-core==1.30.1 # via # azure-identity # azure-storage-blob azure-identity==1.15.0 # via feast (setup.py) -azure-storage-blob==12.19.0 +azure-storage-blob==12.19.1 # via feast (setup.py) babel==2.14.0 # via @@ -67,18 +67,18 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.49 +boto3==1.34.59 # via # feast (setup.py) # moto -botocore==1.34.49 +botocore==1.34.59 # via # boto3 # moto # s3transfer bowler==0.9.0 # via feast (setup.py) -build==1.0.3 +build==1.1.1 # via # feast (setup.py) # pip-tools @@ -86,7 +86,7 @@ bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.14.0 # via firebase-admin -cachetools==5.3.2 +cachetools==5.3.3 # via google-auth cassandra-driver==3.29.0 # via feast (setup.py) @@ -132,7 +132,7 @@ comm==0.2.1 # ipywidgets coverage[toml]==7.4.3 # via pytest-cov -cryptography==42.0.4 +cryptography==42.0.5 # via # 
azure-identity # azure-storage-blob @@ -180,7 +180,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fastjsonschema==2.19.1 # via nbformat @@ -216,9 +216,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.119.0 +google-api-python-client==2.121.0 # via firebase-admin -google-auth==2.28.1 +google-auth==2.28.2 # via # google-api-core # google-api-python-client @@ -245,7 +245,7 @@ google-cloud-datastore==2.19.0 # via feast (setup.py) google-cloud-firestore==2.15.0 # via firebase-admin -google-cloud-storage==2.14.0 +google-cloud-storage==2.15.0 # via # feast (setup.py) # firebase-admin @@ -263,13 +263,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.9 +great-expectations==0.18.10 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.62.0 +grpcio==1.62.1 # via # feast (setup.py) # google-api-core @@ -281,15 +281,15 @@ grpcio==1.62.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.0 +grpcio-health-checking==1.62.1 # via feast (setup.py) -grpcio-reflection==1.62.0 +grpcio-reflection==1.62.1 # via feast (setup.py) -grpcio-status==1.62.0 +grpcio-status==1.62.1 # via google-api-core -grpcio-testing==1.62.0 +grpcio-testing==1.62.1 # via feast (setup.py) -grpcio-tools==1.62.0 +grpcio-tools==1.62.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -344,7 +344,7 @@ importlib-metadata==6.11.0 # nbconvert # sphinx # typeguard -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via # feast (setup.py) # jsonschema @@ -352,7 +352,7 @@ importlib-resources==6.1.1 # jupyterlab iniconfig==2.0.0 # via pytest -ipykernel==6.29.2 +ipykernel==6.29.3 # via jupyterlab ipython==8.12.3 # via @@ -384,7 +384,7 @@ jmespath==1.0.1 # via # 
boto3 # botocore -json5==0.9.17 +json5==0.9.22 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -418,9 +418,9 @@ jupyter-core==5.7.1 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.2 +jupyter-lsp==2.2.4 # via jupyterlab -jupyter-server==2.12.5 +jupyter-server==2.13.0 # via # jupyter-lsp # jupyterlab @@ -429,7 +429,7 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.2 +jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -452,7 +452,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.20.2 +marshmallow==3.21.1 # via great-expectations matplotlib-inline==0.1.6 # via @@ -482,13 +482,13 @@ msal==1.27.0 # msal-extensions msal-extensions==1.1.0 # via azure-identity -msgpack==1.0.7 +msgpack==1.0.8 # via cachecontrol multipledispatch==0.6.0 # via ibis-framework multiprocess==0.70.16 # via bytewax -mypy==1.8.0 +mypy==1.9.0 # via # feast (setup.py) # sqlalchemy @@ -500,7 +500,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.1 +nbconvert==7.16.2 # via jupyter-server nbformat==5.9.2 # via @@ -512,7 +512,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.1.0 +notebook==7.1.1 # via great-expectations notebook-shim==0.2.4 # via @@ -532,7 +532,7 @@ oauthlib==3.2.2 # via requests-oauthlib overrides==7.7.0 # via jupyter-server -packaging==23.2 +packaging==24.0 # via # build # dask @@ -560,6 +560,7 @@ pandas==1.5.3 ; python_version < "3.9" # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # snowflake-connector-python pandocfilters==1.5.1 # via nbconvert @@ -577,7 +578,7 @@ pexpect==4.9.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==7.4.0 +pip-tools==7.4.1 # via feast (setup.py) pkgutil-resolve-name==1.3.10 # via jsonschema @@ -607,7 +608,7 @@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore 
# google-cloud-firestore -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) # google-api-core @@ -644,7 +645,7 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.0 +pyarrow==15.0.1 # via # db-dtypes # feast (setup.py) @@ -662,7 +663,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ -688,9 +689,9 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==24.0.0 +pyopenssl==24.1.0 # via snowflake-connector-python -pyparsing==3.1.1 +pyparsing==3.1.2 # via # great-expectations # httplib2 @@ -698,7 +699,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pyspark==3.5.0 +pyspark==3.5.1 # via feast (setup.py) pytest==7.4.4 # via @@ -724,7 +725,7 @@ pytest-timeout==1.4.2 # via feast (setup.py) pytest-xdist==3.5.0 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # arrow # botocore @@ -792,7 +793,7 @@ requests==2.31.0 # snowflake-connector-python # sphinx # trino -requests-oauthlib==1.3.1 +requests-oauthlib==1.4.0 # via kubernetes responses==0.25.0 # via moto @@ -804,7 +805,7 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.0 +rich==13.7.1 # via ibis-framework rockset==2.1.1 # via feast (setup.py) @@ -835,11 +836,10 @@ six==1.16.0 # kubernetes # mock # multipledispatch - # pandavro # python-dateutil # rfc3339-validator # thriftpy2 -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx @@ -865,7 +865,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ -887,7 +887,7 @@ terminado==0.18.0 # jupyter-server-terminals testcontainers==3.7.1 # via feast (setup.py) -thriftpy2==0.4.17 +thriftpy2==0.4.20 # via happybase tinycss2==1.2.1 # via nbconvert @@ -903,7 +903,7 @@ tomli==2.0.1 # pip-tools # pyproject-hooks # pytest 
-tomlkit==0.12.3 +tomlkit==0.12.4 # via snowflake-connector-python toolz==0.12.1 # via @@ -948,27 +948,27 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==24.0.0.20240130 +types-pyopenssl==24.0.0.20240311 # via types-redis -types-python-dateutil==2.8.19.20240106 +types-python-dateutil==2.8.19.20240311 # via # arrow # feast (setup.py) types-pytz==2024.1.0.20240203 # via feast (setup.py) -types-pyyaml==6.0.12.12 +types-pyyaml==6.0.12.20240311 # via feast (setup.py) -types-redis==4.6.0.20240218 +types-redis==4.6.0.20240311 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.1.0.20240223 +types-setuptools==69.1.0.20240310 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # annotated-types # anyio @@ -1009,7 +1009,7 @@ urllib3==1.26.18 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.27.1 +uvicorn[standard]==0.28.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index d00a77ee6f..dceca2c94c 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -44,7 +44,7 @@ dill==0.3.8 # via feast (setup.py) exceptiongroup==1.2.0 # via anyio -fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -52,18 +52,6 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.62.0 - # via - # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools -grpcio-health-checking==1.62.0 - # via feast (setup.py) -grpcio-reflection==1.62.0 - # via feast (setup.py) -grpcio-tools==1.62.0 - # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) h11==0.14.0 @@ -86,7 +74,7 @@ 
importlib-metadata==6.11.0 # dask # feast (setup.py) # typeguard -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via # feast (setup.py) # jsonschema @@ -105,7 +93,7 @@ mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.8.0 +mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -116,7 +104,7 @@ numpy==1.24.4 # feast (setup.py) # pandas # pyarrow -packaging==23.2 +packaging==24.0 # via # dask # gunicorn @@ -128,17 +116,14 @@ pkgutil-resolve-name==1.3.10 # via jsonschema proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==15.0.0 +pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ -146,7 +131,7 @@ pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn @@ -169,11 +154,11 @@ rpds-py==0.18.0 # referencing six==1.16.0 # via python-dateutil -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ -197,9 +182,9 @@ tqdm==4.66.2 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) -types-protobuf==4.24.0.20240129 +types-protobuf==4.24.0.20240311 # via mypy-protobuf -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # annotated-types # anyio @@ -211,9 +196,11 @@ typing-extensions==4.9.0 # starlette # typeguard # uvicorn +tzdata==2024.1 + # via pandas urllib3==2.2.1 # via requests -uvicorn[standard]==0.27.1 +uvicorn[standard]==0.28.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn @@ -227,6 +214,3 @@ zipp==3.17.0 # via # importlib-metadata # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git 
a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 99f7ee0285..7f1082f55b 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -43,13 +43,13 @@ attrs==23.2.0 # referencing avro==1.10.0 # via feast (setup.py) -azure-core==1.30.0 +azure-core==1.30.1 # via # azure-identity # azure-storage-blob azure-identity==1.15.0 # via feast (setup.py) -azure-storage-blob==12.19.0 +azure-storage-blob==12.19.1 # via feast (setup.py) babel==2.14.0 # via @@ -63,18 +63,18 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.49 +boto3==1.34.59 # via # feast (setup.py) # moto -botocore==1.34.49 +botocore==1.34.59 # via # boto3 # moto # s3transfer bowler==0.9.0 # via feast (setup.py) -build==1.0.3 +build==1.1.1 # via # feast (setup.py) # pip-tools @@ -82,7 +82,7 @@ bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.14.0 # via firebase-admin -cachetools==5.3.2 +cachetools==5.3.3 # via google-auth cassandra-driver==3.29.0 # via feast (setup.py) @@ -127,8 +127,10 @@ comm==0.2.1 # ipykernel # ipywidgets coverage[toml]==7.4.3 - # via pytest-cov -cryptography==42.0.4 + # via + # coverage + # pytest-cov +cryptography==42.0.5 # via # azure-identity # azure-storage-blob @@ -177,7 +179,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fastjsonschema==2.19.1 # via nbformat @@ -213,9 +215,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.119.0 +google-api-python-client==2.121.0 # via firebase-admin -google-auth==2.28.1 +google-auth==2.28.2 # via # google-api-core # google-api-python-client @@ -226,7 +228,9 @@ google-auth==2.28.1 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via feast (setup.py) + # via + # feast 
(setup.py) + # google-cloud-bigquery google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigtable==2.23.0 @@ -242,7 +246,7 @@ google-cloud-datastore==2.19.0 # via feast (setup.py) google-cloud-firestore==2.15.0 # via firebase-admin -google-cloud-storage==2.14.0 +google-cloud-storage==2.15.0 # via # feast (setup.py) # firebase-admin @@ -260,13 +264,13 @@ googleapis-common-protos[grpc]==1.62.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.9 +great-expectations==0.18.10 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy grpc-google-iam-v1==0.13.0 # via google-cloud-bigtable -grpcio==1.62.0 +grpcio==1.62.1 # via # feast (setup.py) # google-api-core @@ -278,15 +282,15 @@ grpcio==1.62.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.0 +grpcio-health-checking==1.62.1 # via feast (setup.py) -grpcio-reflection==1.62.0 +grpcio-reflection==1.62.1 # via feast (setup.py) -grpcio-status==1.62.0 +grpcio-status==1.62.1 # via google-api-core -grpcio-testing==1.62.0 +grpcio-testing==1.62.1 # via feast (setup.py) -grpcio-tools==1.62.0 +grpcio-tools==1.62.1 # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) @@ -341,11 +345,11 @@ importlib-metadata==6.11.0 # nbconvert # sphinx # typeguard -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via feast (setup.py) iniconfig==2.0.0 # via pytest -ipykernel==6.29.2 +ipykernel==6.29.3 # via jupyterlab ipython==8.18.1 # via @@ -377,7 +381,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.17 +json5==0.9.22 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -411,9 +415,9 @@ jupyter-core==5.7.1 # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.2 +jupyter-lsp==2.2.4 # via jupyterlab -jupyter-server==2.12.5 +jupyter-server==2.13.0 # via # jupyter-lsp # jupyterlab @@ -422,7 +426,7 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.1.2 
+jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -445,7 +449,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.20.2 +marshmallow==3.21.1 # via great-expectations matplotlib-inline==0.1.6 # via @@ -475,13 +479,13 @@ msal==1.27.0 # msal-extensions msal-extensions==1.1.0 # via azure-identity -msgpack==1.0.7 +msgpack==1.0.8 # via cachecontrol multipledispatch==1.0.0 # via ibis-framework multiprocess==0.70.16 # via bytewax -mypy==1.8.0 +mypy==1.9.0 # via # feast (setup.py) # sqlalchemy @@ -493,7 +497,7 @@ mypy-protobuf==3.1.0 # via feast (setup.py) nbclient==0.9.0 # via nbconvert -nbconvert==7.16.1 +nbconvert==7.16.2 # via jupyter-server nbformat==5.9.2 # via @@ -505,7 +509,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.1.0 +notebook==7.1.1 # via great-expectations notebook-shim==0.2.4 # via @@ -525,7 +529,7 @@ oauthlib==3.2.2 # via requests-oauthlib overrides==7.7.0 # via jupyter-server -packaging==23.2 +packaging==24.0 # via # build # dask @@ -546,13 +550,14 @@ packaging==23.2 # pytest # snowflake-connector-python # sphinx -pandas==2.2.0 +pandas==2.2.1 # via # altair # db-dtypes # feast (setup.py) # google-cloud-bigquery # great-expectations + # ibis-framework # snowflake-connector-python pandocfilters==1.5.1 # via nbconvert @@ -568,7 +573,7 @@ pbr==6.0.0 # via mock pexpect==4.9.0 # via ipython -pip-tools==7.4.0 +pip-tools==7.4.1 # via feast (setup.py) platformdirs==3.11.0 # via @@ -596,7 +601,7 @@ proto-plus==1.23.0 # google-cloud-bigtable # google-cloud-datastore # google-cloud-firestore -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) # google-api-core @@ -633,7 +638,7 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.0 +pyarrow==15.0.1 # via # db-dtypes # feast (setup.py) @@ -654,7 +659,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ 
-680,9 +685,9 @@ pymysql==1.1.0 # via feast (setup.py) pyodbc==5.1.0 # via feast (setup.py) -pyopenssl==24.0.0 +pyopenssl==24.1.0 # via snowflake-connector-python -pyparsing==3.1.1 +pyparsing==3.1.2 # via # great-expectations # httplib2 @@ -690,7 +695,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pyspark==3.5.0 +pyspark==3.5.1 # via feast (setup.py) pytest==7.4.4 # via @@ -716,7 +721,7 @@ pytest-timeout==1.4.2 # via feast (setup.py) pytest-xdist==3.5.0 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # arrow # botocore @@ -783,7 +788,7 @@ requests==2.31.0 # snowflake-connector-python # sphinx # trino -requests-oauthlib==1.3.1 +requests-oauthlib==1.4.0 # via kubernetes responses==0.25.0 # via moto @@ -795,7 +800,7 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.0 +rich==13.7.1 # via ibis-framework rockset==2.1.1 # via feast (setup.py) @@ -828,14 +833,16 @@ six==1.16.0 # python-dateutil # rfc3339-validator # thriftpy2 -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.7.1 - # via feast (setup.py) + # via + # feast (setup.py) + # snowflake-connector-python sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -854,7 +861,7 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ -866,7 +873,7 @@ stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi -substrait==0.12.1 +substrait==0.14.0 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -878,7 +885,7 @@ terminado==0.18.0 # jupyter-server-terminals testcontainers==3.7.1 # via feast (setup.py) -thriftpy2==0.4.17 +thriftpy2==0.4.20 # via happybase tinycss2==1.2.1 # via nbconvert @@ -894,7 +901,7 @@ tomli==2.0.1 # pip-tools # pyproject-hooks # pytest -tomlkit==0.12.3 +tomlkit==0.12.4 # via 
snowflake-connector-python toolz==0.12.1 # via @@ -939,27 +946,27 @@ types-protobuf==3.19.22 # mypy-protobuf types-pymysql==1.1.0.1 # via feast (setup.py) -types-pyopenssl==24.0.0.20240130 +types-pyopenssl==24.0.0.20240311 # via types-redis -types-python-dateutil==2.8.19.20240106 +types-python-dateutil==2.8.19.20240311 # via # arrow # feast (setup.py) types-pytz==2024.1.0.20240203 # via feast (setup.py) -types-pyyaml==6.0.12.12 +types-pyyaml==6.0.12.20240311 # via feast (setup.py) -types-redis==4.6.0.20240218 +types-redis==4.6.0.20240311 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.1.0.20240223 +types-setuptools==69.1.0.20240310 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # anyio # async-lru @@ -1000,8 +1007,10 @@ urllib3==1.26.18 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.27.1 - # via feast (setup.py) +uvicorn[standard]==0.28.0 + # via + # feast (setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 4364dc62bf..090d013494 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -44,7 +44,7 @@ dill==0.3.8 # via feast (setup.py) exceptiongroup==1.2.0 # via anyio -fastapi==0.109.2 +fastapi==0.110.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -52,18 +52,6 @@ fsspec==2024.2.0 # via dask greenlet==3.0.3 # via sqlalchemy -grpcio==1.62.0 - # via - # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools -grpcio-health-checking==1.62.0 - # via feast (setup.py) -grpcio-reflection==1.62.0 - # via feast (setup.py) -grpcio-tools==1.62.0 - # via feast (setup.py) gunicorn==21.2.0 # via feast (setup.py) h11==0.14.0 @@ -86,7 +74,7 @@ 
importlib-metadata==6.11.0 # dask # feast (setup.py) # typeguard -importlib-resources==6.1.1 +importlib-resources==6.1.3 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -102,7 +90,7 @@ mmh3==4.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.8.0 +mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -113,27 +101,24 @@ numpy==1.24.4 # feast (setup.py) # pandas # pyarrow -packaging==23.2 +packaging==24.0 # via # dask # gunicorn -pandas==2.2.0 +pandas==2.2.1 # via feast (setup.py) partd==1.4.1 # via dask proto-plus==1.23.0 # via feast (setup.py) -protobuf==4.23.4 +protobuf==4.25.3 # via # feast (setup.py) - # grpcio-health-checking - # grpcio-reflection - # grpcio-tools # mypy-protobuf # proto-plus -pyarrow==15.0.0 +pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.2 +pydantic==2.6.3 # via # fastapi # feast (setup.py) @@ -141,7 +126,7 @@ pydantic-core==2.16.3 # via pydantic pygments==2.17.2 # via feast (setup.py) -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn @@ -164,11 +149,11 @@ rpds-py==0.18.0 # referencing six==1.16.0 # via python-dateutil -sniffio==1.3.0 +sniffio==1.3.1 # via # anyio # httpx -sqlalchemy[mypy]==1.4.51 +sqlalchemy[mypy]==1.4.52 # via # feast (setup.py) # sqlalchemy @@ -192,9 +177,9 @@ tqdm==4.66.2 # via feast (setup.py) typeguard==4.1.5 # via feast (setup.py) -types-protobuf==4.24.0.20240129 +types-protobuf==4.24.0.20240311 # via mypy-protobuf -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # anyio # fastapi @@ -205,10 +190,14 @@ typing-extensions==4.9.0 # starlette # typeguard # uvicorn +tzdata==2024.1 + # via pandas urllib3==2.2.1 # via requests -uvicorn[standard]==0.27.1 - # via feast (setup.py) +uvicorn[standard]==0.28.0 + # via + # feast (setup.py) + # uvicorn uvloop==0.19.0 # via uvicorn volatile==2.1.0 @@ -221,6 +210,3 @@ zipp==3.17.0 # via # importlib-metadata # importlib-resources - -# The following packages are considered to 
be unsafe in a requirements file: -# setuptools diff --git a/setup.py b/setup.py index 0459cbd6eb..4da59c4e21 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ "numpy>=1.22,<1.25", "pandas>=1.4.3,<3", # Higher than 4.23.4 seems to cause a seg fault - "protobuf<4.23.4,>3.20", + "protobuf>=4.24.0,<5.0.0", "proto-plus>=1.20.0,<2", "pyarrow>=4", "pydantic>=2.0.0", From 34cabfb29a2692180dc6b6dda8bba9062beca4d2 Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Wed, 13 Mar 2024 02:31:37 +0700 Subject: [PATCH 072/122] fix: Raise error when not able read of file source spark source (#4005) fix: raise error when not able to read path sparksource Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- .../offline_stores/contrib/spark_offline_store/spark_source.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index 1ff7e6de58..8cd392ce5d 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -8,7 +8,7 @@ from feast import flags_helper from feast.data_source import DataSource -from feast.errors import DataSourceNoNameException +from feast.errors import DataSourceNoNameException, DataSourceNotFoundException from feast.infra.offline_stores.offline_utils import get_temp_entity_table_name from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.SavedDataset_pb2 import ( @@ -179,6 +179,7 @@ def get_table_query_string(self) -> str: logger.exception( "Spark read of file source failed.\n" + traceback.format_exc() ) + raise DataSourceNotFoundException(self.path) tmp_table_name = get_temp_entity_table_name() df.createOrReplaceTempView(tmp_table_name) From 
e73dae3cc8b0436c28daa9f4e7b7f56daf9cd21d Mon Sep 17 00:00:00 2001 From: cburroughs Date: Sat, 16 Mar 2024 06:48:40 -0400 Subject: [PATCH 073/122] chore: Loosen mypy-protobuf version constraint (#4009) chore: loosen mypy-protobuf version constraint Having this pinned to an exact version increases the likelihood of conflict with user code or other libraries. Changelog: https://github.com/nipunn1313/mypy-protobuf/blob/main/CHANGELOG.md Signed-off-by: Chris Burroughs --- .../requirements/py3.10-ci-requirements.txt | 54 ++++++++----------- .../requirements/py3.10-requirements.txt | 12 ++--- .../requirements/py3.8-ci-requirements.txt | 38 +++++++------ .../requirements/py3.8-requirements.txt | 10 ++-- .../requirements/py3.9-ci-requirements.txt | 54 ++++++++----------- .../requirements/py3.9-requirements.txt | 12 ++--- setup.py | 9 ++-- 7 files changed, 79 insertions(+), 110 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 54dc41fe5b..638d9ae19d 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -63,11 +63,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.59 +boto3==1.34.60 # via # feast (setup.py) # moto -botocore==1.34.59 +botocore==1.34.60 # via # boto3 # moto @@ -122,14 +122,12 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.1 +comm==0.2.2 # via # ipykernel # ipywidgets coverage[toml]==7.4.3 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==42.0.5 # via # azure-identity @@ -215,7 +213,7 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.121.0 +google-api-python-client==2.122.0 # via firebase-admin google-auth==2.28.2 # via @@ -228,9 +226,7 @@ google-auth==2.28.2 google-auth-httplib2==0.2.0 # via google-api-python-client 
google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigtable==2.23.0 @@ -258,7 +254,7 @@ google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.62.0 +googleapis-common-protos[grpc]==1.63.0 # via # feast (setup.py) # google-api-core @@ -391,12 +387,12 @@ jsonschema[format-nongpl]==4.21.1 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.0 +jupyter-client==8.6.1 # via # ipykernel # jupyter-server # nbclient -jupyter-core==5.7.1 +jupyter-core==5.7.2 # via # ipykernel # jupyter-client @@ -405,7 +401,7 @@ jupyter-core==5.7.1 # nbclient # nbconvert # nbformat -jupyter-events==0.9.0 +jupyter-events==0.9.1 # via jupyter-server jupyter-lsp==2.2.4 # via jupyterlab @@ -416,13 +412,13 @@ jupyter-server==2.13.0 # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.5.2 +jupyter-server-terminals==0.5.3 # via jupyter-server jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.3 +jupyterlab-server==2.25.4 # via # jupyterlab # notebook @@ -485,13 +481,13 @@ mypy-extensions==1.0.0 # via # black # mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.3.0 # via feast (setup.py) -nbclient==0.9.0 +nbclient==0.9.1 # via nbconvert nbconvert==7.16.2 # via jupyter-server -nbformat==5.9.2 +nbformat==5.10.2 # via # great-expectations # jupyter-server @@ -651,7 +647,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -830,9 +826,7 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.7.1 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -852,9 +846,7 @@ 
sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 @@ -869,7 +861,7 @@ tabulate==0.9.0 # via feast (setup.py) tenacity==8.2.3 # via feast (setup.py) -terminado==0.18.0 +terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals @@ -911,7 +903,7 @@ tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.14.1 +traitlets==5.14.2 # via # comm # ipykernel @@ -994,9 +986,7 @@ urllib3==1.26.18 # responses # rockset uvicorn[standard]==0.28.0 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 @@ -1023,7 +1013,7 @@ websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.42.0 +wheel==0.43.0 # via pip-tools widgetsnbextension==4.0.10 # via ipywidgets diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 91b42f8271..bbce1ecb7f 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -93,7 +93,7 @@ mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.5.0 # via feast (setup.py) numpy==1.24.4 # via @@ -117,7 +117,7 @@ protobuf==4.25.3 # proto-plus pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -153,9 +153,7 @@ sniffio==1.3.1 # anyio # httpx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -193,9 +191,7 @@ tzdata==2024.1 urllib3==2.2.1 # via requests uvicorn[standard]==0.28.0 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt 
b/sdk/python/requirements/py3.8-ci-requirements.txt index 18da48cc5c..c76294b6c2 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -67,11 +67,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.59 +boto3==1.34.60 # via # feast (setup.py) # moto -botocore==1.34.59 +botocore==1.34.60 # via # boto3 # moto @@ -126,7 +126,7 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.1 +comm==0.2.2 # via # ipykernel # ipywidgets @@ -216,7 +216,7 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.121.0 +google-api-python-client==2.122.0 # via firebase-admin google-auth==2.28.2 # via @@ -257,7 +257,7 @@ google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.62.0 +googleapis-common-protos[grpc]==1.63.0 # via # feast (setup.py) # google-api-core @@ -402,12 +402,12 @@ jsonschema[format-nongpl]==4.21.1 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.0 +jupyter-client==8.6.1 # via # ipykernel # jupyter-server # nbclient -jupyter-core==5.7.1 +jupyter-core==5.7.2 # via # ipykernel # jupyter-client @@ -416,7 +416,7 @@ jupyter-core==5.7.1 # nbclient # nbconvert # nbformat -jupyter-events==0.9.0 +jupyter-events==0.9.1 # via jupyter-server jupyter-lsp==2.2.4 # via jupyterlab @@ -427,13 +427,13 @@ jupyter-server==2.13.0 # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.5.2 +jupyter-server-terminals==0.5.3 # via jupyter-server jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.3 +jupyterlab-server==2.25.4 # via # jupyterlab # notebook @@ -496,13 +496,13 @@ mypy-extensions==1.0.0 # via # black # mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.3.0 # via feast (setup.py) -nbclient==0.9.0 +nbclient==0.9.1 # via 
nbconvert nbconvert==7.16.2 # via jupyter-server -nbformat==5.9.2 +nbformat==5.10.2 # via # great-expectations # jupyter-server @@ -663,7 +663,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -866,9 +866,7 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 # via sphinx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==10.6.4 @@ -881,7 +879,7 @@ tabulate==0.9.0 # via feast (setup.py) tenacity==8.2.3 # via feast (setup.py) -terminado==0.18.0 +terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals @@ -923,7 +921,7 @@ tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.14.1 +traitlets==5.14.2 # via # comm # ipykernel @@ -1037,7 +1035,7 @@ websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.42.0 +wheel==0.43.0 # via pip-tools widgetsnbextension==4.0.10 # via ipywidgets diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index dceca2c94c..e689c011c5 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -38,7 +38,7 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.5.0 +dask==2023.2.1 # via feast (setup.py) dill==0.3.8 # via feast (setup.py) @@ -97,7 +97,7 @@ mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.5.0 # via feast (setup.py) numpy==1.24.4 # via @@ -123,7 +123,7 @@ protobuf==4.25.3 # proto-plus pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -159,9 +159,7 @@ sniffio==1.3.1 # anyio # httpx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 diff 
--git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 7f1082f55b..6e7e7a4ef2 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -63,11 +63,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.59 +boto3==1.34.60 # via # feast (setup.py) # moto -botocore==1.34.59 +botocore==1.34.60 # via # boto3 # moto @@ -122,14 +122,12 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.2.1 +comm==0.2.2 # via # ipykernel # ipywidgets coverage[toml]==7.4.3 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==42.0.5 # via # azure-identity @@ -215,7 +213,7 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.121.0 +google-api-python-client==2.122.0 # via firebase-admin google-auth==2.28.2 # via @@ -228,9 +226,7 @@ google-auth==2.28.2 google-auth-httplib2==0.2.0 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.24.0 # via feast (setup.py) google-cloud-bigtable==2.23.0 @@ -258,7 +254,7 @@ google-resumable-media==2.7.0 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.62.0 +googleapis-common-protos[grpc]==1.63.0 # via # feast (setup.py) # google-api-core @@ -399,12 +395,12 @@ jsonschema[format-nongpl]==4.21.1 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.0 +jupyter-client==8.6.1 # via # ipykernel # jupyter-server # nbclient -jupyter-core==5.7.1 +jupyter-core==5.7.2 # via # ipykernel # jupyter-client @@ -413,7 +409,7 @@ jupyter-core==5.7.1 # nbclient # nbconvert # nbformat -jupyter-events==0.9.0 +jupyter-events==0.9.1 # via jupyter-server jupyter-lsp==2.2.4 # via jupyterlab @@ -424,13 +420,13 @@ 
jupyter-server==2.13.0 # jupyterlab-server # notebook # notebook-shim -jupyter-server-terminals==0.5.2 +jupyter-server-terminals==0.5.3 # via jupyter-server jupyterlab==4.1.4 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.25.3 +jupyterlab-server==2.25.4 # via # jupyterlab # notebook @@ -493,13 +489,13 @@ mypy-extensions==1.0.0 # via # black # mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.3.0 # via feast (setup.py) -nbclient==0.9.0 +nbclient==0.9.1 # via nbconvert nbconvert==7.16.2 # via jupyter-server -nbformat==5.9.2 +nbformat==5.10.2 # via # great-expectations # jupyter-server @@ -659,7 +655,7 @@ pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -840,9 +836,7 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.7.1 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -862,9 +856,7 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 @@ -879,7 +871,7 @@ tabulate==0.9.0 # via feast (setup.py) tenacity==8.2.3 # via feast (setup.py) -terminado==0.18.0 +terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals @@ -921,7 +913,7 @@ tqdm==4.66.2 # via # feast (setup.py) # great-expectations -traitlets==5.14.1 +traitlets==5.14.2 # via # comm # ipykernel @@ -1008,9 +1000,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.28.0 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 @@ -1037,7 +1027,7 @@ websockets==12.0 # via uvicorn werkzeug==3.0.1 # via moto -wheel==0.42.0 +wheel==0.43.0 # via pip-tools widgetsnbextension==4.0.10 # via 
ipywidgets diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 090d013494..ac0954c167 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -94,7 +94,7 @@ mypy==1.9.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.1.0 +mypy-protobuf==3.5.0 # via feast (setup.py) numpy==1.24.4 # via @@ -118,7 +118,7 @@ protobuf==4.25.3 # proto-plus pyarrow==15.0.1 # via feast (setup.py) -pydantic==2.6.3 +pydantic==2.6.4 # via # fastapi # feast (setup.py) @@ -154,9 +154,7 @@ sniffio==1.3.1 # anyio # httpx sqlalchemy[mypy]==1.4.52 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -195,9 +193,7 @@ tzdata==2024.1 urllib3==2.2.1 # via requests uvicorn[standard]==0.28.0 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/setup.py b/setup.py index 4da59c4e21..50b45da5db 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill~=0.3.0", - "mypy-protobuf==3.1", + "mypy-protobuf>=3.1", "Jinja2>=2,<4", "jsonschema", "mmh3", @@ -67,7 +67,8 @@ "fastapi>=0.68.0", "uvicorn[standard]>=0.14.0,<1", "gunicorn", - "dask>=2021.1.0", + # https://github.com/dask/dask/issues/10996 + "dask>=2021.1.0,<2024.3.0", "bowler", # Needed for automatic repo upgrades # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). 
"httpx>=0.23.3", @@ -222,7 +223,7 @@ for _r in MYSQL_REQUIRED: DOCS_REQUIRED.remove(_r) -DEV_REQUIRED = ["mypy-protobuf==3.1", "grpcio-testing~=1.0"] + CI_REQUIRED +DEV_REQUIRED = ["grpcio-testing~=1.0"] + CI_REQUIRED # Get git repo root directory repo_root = str(pathlib.Path(__file__).resolve().parent) @@ -400,7 +401,7 @@ def run(self): "setuptools_scm", "grpcio>=1.56.2,<2", "grpcio-tools>=1.56.2,<2", - "mypy-protobuf==3.1", + "mypy-protobuf>=3.1", "pybindgen==0.22.0", ], cmdclass={ From 78fae3eb45ce780956ca015a7fc95dbefc16961a Mon Sep 17 00:00:00 2001 From: Patrick McGleenon Date: Sat, 16 Mar 2024 11:02:09 +0000 Subject: [PATCH 074/122] docs: Updated hazelcast for missing code tag (#4012) Added ```endcode``` tag for code sample Signed-off-by: Patrick McGleenon --- docs/reference/online-stores/hazelcast.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/online-stores/hazelcast.md b/docs/reference/online-stores/hazelcast.md index ef65f42b31..e7a6cf757c 100644 --- a/docs/reference/online-stores/hazelcast.md +++ b/docs/reference/online-stores/hazelcast.md @@ -2,17 +2,17 @@ ## Description -Hazelcast online store is in alpha development. +The Hazelcast online store is in alpha development. The [Hazelcast](htpps://hazelcast.com) online store provides support for materializing feature values into a Hazelcast cluster for serving online features in real-time. -In order to use Hazelcast as online store, you need to have a running Hazelcast cluster. You can create a cluster using Hazelcast Viridian Serverless. See this [getting started](https://hazelcast.com/get-started/) page for more details. +In order to use Hazelcast as an online store, you need to have a running Hazelcast cluster. You can create a cluster using Hazelcast Viridian Serverless. See this [getting started](https://hazelcast.com/get-started/) page for more details. 
* Each feature view is mapped one-to-one to a specific Hazelcast IMap * This implementation inherits all strengths of Hazelcast such as high availability, fault-tolerance, and data distribution. * Secure TSL/SSL connection is supported by Hazelcast online store. * You can set TTL (Time-To-Live) setting for your features in Hazelcast cluster. -Each feature view corresponds to an IMap in Hazelcast cluster and the entries in that IMap corresponds to features of entities. +Each feature view corresponds to an IMap in Hazelcast cluster and the entries in that IMap correspond to features of entities. Each feature value stored separately and can be retrieved individually. ## Getting started @@ -33,6 +33,7 @@ online_store: cluster_members: ["localhost:5701"] key_ttl_seconds: 36000 ``` +{% endcode %} ## Functionality Matrix From 32e49d0ff9f0d5a78a19f6452847d218ac7fc482 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Mar 2024 11:21:42 +0000 Subject: [PATCH 075/122] chore: Bump avro from 1.10.0 to 1.11.3 in /sdk/python/requirements (#3967) Bumps [avro](https://github.com/apache/avro) from 1.10.0 to 1.11.3. - [Release notes](https://github.com/apache/avro/releases) - [Commits](https://github.com/apache/avro/compare/release-1.10.0...release-1.11.3) --- updated-dependencies: - dependency-name: avro dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 638d9ae19d..b00b85822c 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -41,7 +41,7 @@ attrs==23.2.0 # bowler # jsonschema # referencing -avro==1.10.0 +avro==1.11.3 # via feast (setup.py) azure-core==1.30.1 # via diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index c76294b6c2..3d3dbe764b 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -41,7 +41,7 @@ attrs==23.2.0 # bowler # jsonschema # referencing -avro==1.10.0 +avro==1.11.3 # via feast (setup.py) azure-core==1.30.1 # via diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6e7e7a4ef2..de0a507f8c 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -41,7 +41,7 @@ attrs==23.2.0 # bowler # jsonschema # referencing -avro==1.10.0 +avro==1.11.3 # via feast (setup.py) azure-core==1.30.1 # via From 277b891ffa1193914b123672010e588573dcaa98 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Sun, 17 Mar 2024 18:04:58 -0400 Subject: [PATCH 076/122] feat: Update pyproject.toml to use Python 3.9 as default (#4011) --- pyproject.toml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c89f1d9cc7..bfe2bc9fd0 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 88 -target-version = ['py38'] +target-version = ['py39'] include = '\.pyi?$' exclude = ''' ( diff --git a/setup.py b/setup.py index 50b45da5db..b3ed889e0b 100644 --- a/setup.py +++ b/setup.py @@ -393,7 +393,7 @@ def run(self): "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.9", ], entry_points={"console_scripts": ["feast=feast.cli:cli"]}, use_scm_version=use_scm_version, From be5277207c4e4ce02366d654ccf1dac717ae6660 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Mar 2024 22:18:36 +0000 Subject: [PATCH 077/122] chore: Bump google.golang.org/protobuf from 1.30.0 to 1.33.0 (#4013) Bumps google.golang.org/protobuf from 1.30.0 to 1.33.0. --- updated-dependencies: - dependency-name: google.golang.org/protobuf dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 6def933985..1dccc1dddb 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/spaolacci/murmur3 v1.1.0 github.com/stretchr/testify v1.7.0 google.golang.org/grpc v1.56.3 - google.golang.org/protobuf v1.30.0 + google.golang.org/protobuf v1.33.0 ) require ( diff --git a/go.sum b/go.sum index 9a33f2c9ee..39f2d4d514 100644 --- a/go.sum +++ b/go.sum @@ -1826,8 +1826,9 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From a0f7472f200300f3a45aa404922dd67bb4ad237f Mon Sep 17 00:00:00 2001 From: lokeshrangineni Date: Mon, 18 Mar 2024 16:54:23 -0400 Subject: [PATCH 078/122] feat: Dropping the support for python 3.8 version from feast (#4010) * Dropping the support for python 3.8 version from feast Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * updating the 
pyproject.toml to use the python 3.9 version. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * Dropping the support for python 3.8 but these are required to merge the PR as the PR build needs these files because it runs based on the master files. We will be deleting these files once the PR is merged. Signed-off-by: Lokesh Rangineni * Adding missed file. dropping the support for python 3.8. Dropping the support for python 3.8 but these are required to merge the PR as the PR build needs these files because it runs based on the master files. We will be deleting these files once the PR is merged. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * Trying to fix the integration test failures with drop python 3.8 version PR. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni --------- Signed-off-by: Lokesh Rangineni --- .../fork_pr_integration_tests_aws.yml | 2 +- .../fork_pr_integration_tests_gcp.yml | 2 +- .../fork_pr_integration_tests_snowflake.yml | 2 +- .github/workflows/build_wheels.yml | 4 +-- .github/workflows/java_master_only.yml | 6 ++-- .github/workflows/java_pr.yml | 8 ++--- .github/workflows/linter.yml | 4 +-- .github/workflows/master_only.yml | 2 +- .github/workflows/nightly-ci.yml | 4 +-- .github/workflows/publish.yml | 2 +- environment-setup.md | 23 +++++++++++++ .../docker-compose/feast10/Dockerfile | 2 +- .../feature_servers/multicloud/Dockerfile | 2 +- .../feature_servers/multicloud/Dockerfile.dev | 2 +- .../snowflake_python_udfs_creation.sql | 32 +++++++++---------- .../snowflake/snowpark/snowflake_udfs.py | 32 +++++++++---------- setup.py | 3 +- 17 files changed, 77 insertions(+), 55 deletions(-) create mode 100644 environment-setup.md diff --git a/.github/fork_workflows/fork_pr_integration_tests_aws.yml b/.github/fork_workflows/fork_pr_integration_tests_aws.yml index 7261833ae6..49fd16ef5d 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_aws.yml +++ 
b/.github/fork_workflows/fork_pr_integration_tests_aws.yml @@ -67,7 +67,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.9" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml index 1a05c068b5..cf85cc8873 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml +++ b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.9" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml index 9327f5c729..0b1c8a48ce 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml +++ b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.9" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 6e6539cf9e..f0851f5bb0 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -59,7 +59,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v3 with: - python-version: "3.8" + python-version: "3.9" architecture: x64 - name: Setup Node uses: actions/setup-node@v3 @@ -137,7 +137,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest ] - python-version: [ "3.8", "3.9", "3.10"] + python-version: ["3.9", "3.10"] from-source: [ True, False ] env: # this script is for testing servers diff --git a/.github/workflows/java_master_only.yml b/.github/workflows/java_master_only.yml index d82f69dd3c..79b456e571 100644 --- a/.github/workflows/java_master_only.yml +++ b/.github/workflows/java_master_only.yml @@ 
-25,7 +25,7 @@ jobs: uses: actions/setup-python@v3 id: setup-python with: - python-version: "3.8" + python-version: "3.9" architecture: x64 - name: Authenticate to Google Cloud uses: 'google-github-actions/auth@v1' @@ -95,7 +95,7 @@ jobs: if: github.repository == 'feast-dev/feast' runs-on: ubuntu-latest env: - PYTHON: 3.8 + PYTHON: 3.9 steps: - uses: actions/checkout@v3 with: @@ -110,7 +110,7 @@ jobs: uses: actions/setup-python@v3 id: setup-python with: - python-version: 3.8 + python-version: 3.9 architecture: x64 - name: Get pip cache dir id: pip-cache diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index d362a6bb54..b78b5297d2 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -88,7 +88,7 @@ jobs: uses: actions/setup-python@v3 id: setup-python with: - python-version: "3.8" + python-version: "3.9" architecture: x64 - name: Authenticate to Google Cloud uses: 'google-github-actions/auth@v1' @@ -111,7 +111,7 @@ jobs: runs-on: ubuntu-latest needs: unit-test-java env: - PYTHON: 3.8 + PYTHON: 3.9 steps: - uses: actions/checkout@v3 with: @@ -128,7 +128,7 @@ jobs: architecture: x64 - uses: actions/setup-python@v3 with: - python-version: '3.8' + python-version: '3.9' architecture: 'x64' - uses: actions/cache@v2 with: @@ -158,7 +158,7 @@ jobs: uses: actions/setup-python@v3 id: setup-python with: - python-version: 3.8 + python-version: 3.9 architecture: x64 - name: Get pip cache dir id: pip-cache diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index a4a42a11ed..a1747db135 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -6,14 +6,14 @@ jobs: lint-python: runs-on: [ubuntu-latest] env: - PYTHON: 3.8 + PYTHON: 3.9 steps: - uses: actions/checkout@v3 - name: Setup Python id: setup-python uses: actions/setup-python@v3 with: - python-version: "3.8" + python-version: "3.9" architecture: x64 - name: Get pip cache dir id: pip-cache diff --git 
a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml index 580ea3171b..ded5310b50 100644 --- a/.github/workflows/master_only.yml +++ b/.github/workflows/master_only.yml @@ -65,7 +65,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8", "3.9", "3.10" ] + python-version: ["3.9", "3.10" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml index 0e1df81262..e39f2e1c00 100644 --- a/.github/workflows/nightly-ci.yml +++ b/.github/workflows/nightly-ci.yml @@ -36,7 +36,7 @@ jobs: uses: actions/setup-python@v3 id: setup-python with: - python-version: "3.8" + python-version: "3.9" architecture: x64 - name: Set up AWS SDK uses: aws-actions/configure-aws-credentials@v1 @@ -124,7 +124,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.8" ] + python-version: [ "3.9" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 135d1d3a8d..432ab4bb58 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -160,7 +160,7 @@ jobs: architecture: x64 - uses: actions/setup-python@v3 with: - python-version: '3.7' + python-version: '3.9' architecture: 'x64' - uses: actions/cache@v2 with: diff --git a/environment-setup.md b/environment-setup.md new file mode 100644 index 0000000000..a6c30c2aa2 --- /dev/null +++ b/environment-setup.md @@ -0,0 +1,23 @@ +1. install anaconda, install docker +2. create an environment for feast, selecting python 3.9. Activate the environment: +```bash +conda create --name feast python=3.9 +conda activate feast +``` +3. install dependencies: +```bash +pip install pip-tools +brew install mysql +brew install xz protobuf openssl zlib +pip install cryptography -U +conda install protobuf +conda install pymssql +pip install -e ".[dev]" +make install-protoc-dependencies PYTHON=3.9 +make install-python-ci-dependencies PYTHON=3.9 +``` +4. 
start the docker daemon +5. run unit tests: +```bash +make test-python +``` \ No newline at end of file diff --git a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile index dee7dcf84c..7e36658cae 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile +++ b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8 +FROM python:3.9 WORKDIR /usr/src/ diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index 4527c5b156..5d2d425cda 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8 +FROM python:3.9 # Input the feast version to install # This requires feast package to be available in pypi before building this image diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev index 015e3c7ee8..900578f55d 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM python:3.8 +FROM python:3.9 # Input the feast version to install # This requires feast package to be available in pypi before building this image diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql index a444c0b7c5..e39b12c1f7 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql @@ -1,7 +1,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_binary_to_bytes_proto(df BINARY) RETURNS BINARY LANGUAGE PYTHON 
- RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_binary_to_bytes_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -9,7 +9,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_binary_to_bytes_proto CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_varchar_to_string_proto(df VARCHAR) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_varchar_to_string_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -17,7 +17,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_varchar_to_string_pro CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_bytes_to_list_bytes_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -25,7 +25,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_bytes_to_list_b CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_varchar_to_list_string_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -33,7 +33,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_varchar_to_list CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int32_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 
'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -41,7 +41,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int64_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -49,7 +49,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_float_to_list_double_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -57,7 +57,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_float_to_list_d CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_boolean_to_list_bool_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -65,7 +65,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_boolean_to_list CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 
'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -73,7 +73,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_timestamp_to_li CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int32_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int32_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -81,7 +81,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int32_proto CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int64_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int64_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -89,7 +89,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int64_proto CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_float_to_double_proto(df DOUBLE) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_float_to_double_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -97,7 +97,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_float_to_double_proto CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_boolean_to_bool_proto(df BOOLEAN) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_boolean_to_bool_boolean_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -105,7 +105,7 @@ CREATE FUNCTION IF 
NOT EXISTS feast_PROJECT_NAME_snowflake_boolean_to_bool_proto CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_timestamp_to_unix_timestamp_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_timestamp_to_unix_timestamp_proto' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -113,7 +113,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_timestamp_to_unix_tim CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_serialize_entity_keys(names ARRAY, data ARRAY, types ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_serialize_entity_keys' IMPORTS = ('@STAGE_HOLDER/feast.zip'); @@ -121,7 +121,7 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_serialize_entity_keys(names ARR CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_entity_key_proto_to_string(names ARRAY, data ARRAY, types ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_entity_key_proto_to_string' IMPORTS = ('@STAGE_HOLDER/feast.zip') diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py index f5d5f10631..ebba3e9b84 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py +++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py @@ -18,7 +18,7 @@ CREATE OR REPLACE FUNCTION feast_snowflake_binary_to_bytes_proto(df BINARY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 
'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_binary_to_bytes_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -41,7 +41,7 @@ def feast_snowflake_binary_to_bytes_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_varchar_to_string_proto(df VARCHAR) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_varchar_to_string_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -64,7 +64,7 @@ def feast_snowflake_varchar_to_string_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_bytes_to_list_bytes_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -90,7 +90,7 @@ def feast_snowflake_array_bytes_to_list_bytes_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_varchar_to_list_string_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -114,7 +114,7 @@ def feast_snowflake_array_varchar_to_list_string_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int32_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -138,7 +138,7 @@ def feast_snowflake_array_number_to_list_int32_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int64_proto(df ARRAY) RETURNS BINARY 
LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -162,7 +162,7 @@ def feast_snowflake_array_number_to_list_int64_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_float_to_list_double_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -188,7 +188,7 @@ def feast_snowflake_array_float_to_list_double_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_boolean_to_list_bool_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -212,7 +212,7 @@ def feast_snowflake_array_boolean_to_list_bool_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -238,7 +238,7 @@ def feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_number_to_int32_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int32_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ 
-261,7 +261,7 @@ def feast_snowflake_number_to_int32_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_number_to_int64_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int64_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -286,7 +286,7 @@ def feast_snowflake_number_to_int64_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_float_to_double_proto(df DOUBLE) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_float_to_double_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -309,7 +309,7 @@ def feast_snowflake_float_to_double_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_boolean_to_bool_proto(df BOOLEAN) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_boolean_to_bool_boolean_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -332,7 +332,7 @@ def feast_snowflake_boolean_to_bool_boolean_proto(df): CREATE OR REPLACE FUNCTION feast_snowflake_timestamp_to_unix_timestamp_proto(df NUMBER) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_timestamp_to_unix_timestamp_proto' IMPORTS = ('@feast_stage/feast.zip'); @@ -358,7 +358,7 @@ def feast_snowflake_timestamp_to_unix_timestamp_proto(df): CREATE OR REPLACE FUNCTION feast_serialize_entity_keys(names ARRAY, data ARRAY, types ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 
'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_serialize_entity_keys' IMPORTS = ('@feast_stage/feast.zip') @@ -405,7 +405,7 @@ def feast_serialize_entity_keys(df): CREATE OR REPLACE FUNCTION feast_entity_key_proto_to_string(names ARRAY, data ARRAY, types ARRAY) RETURNS BINARY LANGUAGE PYTHON - RUNTIME_VERSION = '3.8' + RUNTIME_VERSION = '3.9' PACKAGES = ('protobuf', 'pandas') HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_entity_key_proto_to_string' IMPORTS = ('@feast_stage/feast.zip') diff --git a/setup.py b/setup.py index b3ed889e0b..92d7356411 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ DESCRIPTION = "Python SDK for Feast" URL = "https://github.com/feast-dev/feast" AUTHOR = "Feast" -REQUIRES_PYTHON = ">=3.8.0" +REQUIRES_PYTHON = ">=3.9.0" REQUIRED = [ "click>=7.0.0,<9.0.0", @@ -195,7 +195,6 @@ "types-setuptools", "types-tabulate", "virtualenv<20.24.2", - "pandas>=1.4.3,<2; python_version < '3.9'", ] + GCP_REQUIRED + REDIS_REQUIRED From 7b93c3dba0b277c712acf393bd225b08b6ee78d0 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 19 Mar 2024 20:41:42 +0400 Subject: [PATCH 079/122] chore: Clean up dependencies in setup.py (#4004) chore: setup.py cleanup Signed-off-by: tokoko --- Makefile | 3 +- .../requirements/py3.10-ci-requirements.txt | 45 +++++++++---------- .../requirements/py3.10-requirements.txt | 36 +++++---------- .../requirements/py3.9-ci-requirements.txt | 45 +++++++++---------- .../requirements/py3.9-requirements.txt | 36 +++++---------- setup.py | 16 ++----- 6 files changed, 72 insertions(+), 109 deletions(-) diff --git a/Makefile b/Makefile index 1a1d3b6e92..0eac7e03a2 100644 --- a/Makefile +++ b/Makefile @@ -38,7 +38,8 @@ build: protos build-java build-docker install-python-ci-dependencies: python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt - COMPILE_GO=true python setup.py develop + pip install --no-deps -e . 
+ python setup.py build_python_protos --inplace lock-python-ci-dependencies: python -m piptools compile -U --extra ci --output-file sdk/python/requirements/py$(PYTHON)-ci-requirements.txt diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index b00b85822c..8f0ef90d77 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -41,8 +41,6 @@ attrs==23.2.0 # bowler # jsonschema # referencing -avro==1.11.3 - # via feast (setup.py) azure-core==1.30.1 # via # azure-identity @@ -63,11 +61,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.60 +boto3==1.34.65 # via # feast (setup.py) # moto -botocore==1.34.60 +botocore==1.34.65 # via # boto3 # moto @@ -126,7 +124,7 @@ comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.4.3 +coverage[toml]==7.4.4 # via pytest-cov cryptography==42.0.5 # via @@ -242,7 +240,7 @@ google-cloud-datastore==2.19.0 # via feast (setup.py) google-cloud-firestore==2.15.0 # via firebase-admin -google-cloud-storage==2.15.0 +google-cloud-storage==2.16.0 # via # feast (setup.py) # firebase-admin @@ -260,7 +258,7 @@ googleapis-common-protos[grpc]==1.63.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.10 +great-expectations==0.18.11 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -333,7 +331,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.1.3 +importlib-resources==6.3.1 # via feast (setup.py) iniconfig==2.0.0 # via pytest @@ -369,7 +367,7 @@ jmespath==1.0.1 # via # boto3 # botocore -json5==0.9.22 +json5==0.9.24 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -401,7 +399,7 @@ jupyter-core==5.7.2 # nbclient # nbconvert # nbformat -jupyter-events==0.9.1 +jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.4 # via jupyterlab @@ -414,7 +412,7 @@ jupyter-server==2.13.0 # 
notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.1.4 +jupyterlab==4.1.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -483,11 +481,11 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.3.0 # via feast (setup.py) -nbclient==0.9.1 +nbclient==0.10.0 # via nbconvert nbconvert==7.16.2 # via jupyter-server -nbformat==5.10.2 +nbformat==5.10.3 # via # great-expectations # jupyter-server @@ -497,7 +495,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.1.1 +notebook==7.1.2 # via great-expectations notebook-shim==0.2.4 # via @@ -583,7 +581,6 @@ prompt-toolkit==3.0.43 # via ipython proto-plus==1.23.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -626,7 +623,7 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.1 +pyarrow==15.0.2 # via # db-dtypes # feast (setup.py) @@ -750,7 +747,7 @@ pyzmq==25.1.2 # jupyter-server redis==4.6.0 # via feast (setup.py) -referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -800,7 +797,7 @@ rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations -s3transfer==0.10.0 +s3transfer==0.10.1 # via boto3 scipy==1.12.0 # via great-expectations @@ -846,7 +843,9 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 @@ -855,7 +854,7 @@ stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi -substrait==0.14.0 +substrait==0.14.1 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -930,7 +929,7 @@ types-pymysql==1.1.0.1 # via feast (setup.py) types-pyopenssl==24.0.0.20240311 # via types-redis -types-python-dateutil==2.8.19.20240311 +types-python-dateutil==2.9.0.20240316 # via # arrow # feast (setup.py) @@ -942,7 +941,7 @@ 
types-redis==4.6.0.20240311 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.1.0.20240310 +types-setuptools==69.2.0.20240317 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) @@ -1021,7 +1020,7 @@ wrapt==1.16.0 # via testcontainers xmltodict==0.13.0 # via moto -zipp==3.17.0 +zipp==3.18.1 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index bbce1ecb7f..e17a588538 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -8,7 +8,6 @@ annotated-types==0.6.0 # via pydantic anyio==4.3.0 # via - # httpx # starlette # watchfiles appdirs==1.4.4 @@ -21,10 +20,7 @@ attrs==23.2.0 bowler==0.9.0 # via feast (setup.py) certifi==2024.2.2 - # via - # httpcore - # httpx - # requests + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 @@ -48,32 +44,25 @@ fastapi==0.110.0 # via feast (setup.py) fissix==21.11.13 # via bowler -fsspec==2024.2.0 +fsspec==2024.3.1 # via dask greenlet==3.0.3 # via sqlalchemy gunicorn==21.2.0 # via feast (setup.py) h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.4 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via feast (setup.py) idna==3.6 # via # anyio - # httpx # requests importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.1.3 +importlib-resources==6.3.1 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -108,14 +97,11 @@ pandas==2.2.1 # via feast (setup.py) partd==1.4.1 # via dask -proto-plus==1.23.0 - # via feast (setup.py) protobuf==4.25.3 # via # feast (setup.py) # mypy-protobuf - # proto-plus -pyarrow==15.0.1 +pyarrow==15.0.2 # via feast (setup.py) pydantic==2.6.4 # via @@ -136,7 +122,7 @@ pyyaml==6.0.1 # dask # feast (setup.py) # uvicorn 
-referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -149,11 +135,11 @@ rpds-py==0.18.0 six==1.16.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -200,5 +186,5 @@ watchfiles==0.21.0 # via uvicorn websockets==12.0 # via uvicorn -zipp==3.17.0 +zipp==3.18.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index de0a507f8c..dc96554431 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -41,8 +41,6 @@ attrs==23.2.0 # bowler # jsonschema # referencing -avro==1.11.3 - # via feast (setup.py) azure-core==1.30.1 # via # azure-identity @@ -63,11 +61,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.60 +boto3==1.34.65 # via # feast (setup.py) # moto -botocore==1.34.60 +botocore==1.34.65 # via # boto3 # moto @@ -126,7 +124,7 @@ comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.4.3 +coverage[toml]==7.4.4 # via pytest-cov cryptography==42.0.5 # via @@ -242,7 +240,7 @@ google-cloud-datastore==2.19.0 # via feast (setup.py) google-cloud-firestore==2.15.0 # via firebase-admin -google-cloud-storage==2.15.0 +google-cloud-storage==2.16.0 # via # feast (setup.py) # firebase-admin @@ -260,7 +258,7 @@ googleapis-common-protos[grpc]==1.63.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.10 +great-expectations==0.18.11 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -341,7 +339,7 @@ importlib-metadata==6.11.0 # nbconvert # sphinx # typeguard -importlib-resources==6.1.3 +importlib-resources==6.3.1 # via feast (setup.py) iniconfig==2.0.0 # via pytest @@ -377,7 +375,7 @@ jmespath==1.0.1 # via # boto3 # botocore 
-json5==0.9.22 +json5==0.9.24 # via jupyterlab-server jsonpatch==1.33 # via great-expectations @@ -409,7 +407,7 @@ jupyter-core==5.7.2 # nbclient # nbconvert # nbformat -jupyter-events==0.9.1 +jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.4 # via jupyterlab @@ -422,7 +420,7 @@ jupyter-server==2.13.0 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.1.4 +jupyterlab==4.1.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert @@ -491,11 +489,11 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.3.0 # via feast (setup.py) -nbclient==0.9.1 +nbclient==0.10.0 # via nbconvert nbconvert==7.16.2 # via jupyter-server -nbformat==5.10.2 +nbformat==5.10.3 # via # great-expectations # jupyter-server @@ -505,7 +503,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.8.0 # via pre-commit -notebook==7.1.1 +notebook==7.1.2 # via great-expectations notebook-shim==0.2.4 # via @@ -591,7 +589,6 @@ prompt-toolkit==3.0.43 # via ipython proto-plus==1.23.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -634,7 +631,7 @@ py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.1 +pyarrow==15.0.2 # via # db-dtypes # feast (setup.py) @@ -758,7 +755,7 @@ pyzmq==25.1.2 # jupyter-server redis==4.6.0 # via feast (setup.py) -referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -810,7 +807,7 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -s3transfer==0.10.0 +s3transfer==0.10.1 # via boto3 scipy==1.12.0 # via great-expectations @@ -856,7 +853,9 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 @@ -865,7 +864,7 @@ stack-data==0.6.3 # via ipython starlette==0.36.3 # via fastapi -substrait==0.14.0 
+substrait==0.14.1 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -940,7 +939,7 @@ types-pymysql==1.1.0.1 # via feast (setup.py) types-pyopenssl==24.0.0.20240311 # via types-redis -types-python-dateutil==2.8.19.20240311 +types-python-dateutil==2.9.0.20240316 # via # arrow # feast (setup.py) @@ -952,7 +951,7 @@ types-redis==4.6.0.20240311 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==69.1.0.20240310 +types-setuptools==69.2.0.20240317 # via feast (setup.py) types-tabulate==0.9.0.20240106 # via feast (setup.py) @@ -1035,7 +1034,7 @@ wrapt==1.16.0 # via testcontainers xmltodict==0.13.0 # via moto -zipp==3.17.0 +zipp==3.18.1 # via # importlib-metadata # importlib-resources diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index ac0954c167..f2228ade02 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -8,7 +8,6 @@ annotated-types==0.6.0 # via pydantic anyio==4.3.0 # via - # httpx # starlette # watchfiles appdirs==1.4.4 @@ -21,10 +20,7 @@ attrs==23.2.0 bowler==0.9.0 # via feast (setup.py) certifi==2024.2.2 - # via - # httpcore - # httpx - # requests + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 @@ -48,33 +44,26 @@ fastapi==0.110.0 # via feast (setup.py) fissix==21.11.13 # via bowler -fsspec==2024.2.0 +fsspec==2024.3.1 # via dask greenlet==3.0.3 # via sqlalchemy gunicorn==21.2.0 # via feast (setup.py) h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.4 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via feast (setup.py) idna==3.6 # via # anyio - # httpx # requests importlib-metadata==6.11.0 # via # dask # feast (setup.py) # typeguard -importlib-resources==6.1.3 +importlib-resources==6.3.1 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -109,14 +98,11 @@ pandas==2.2.1 # via feast (setup.py) partd==1.4.1 # via 
dask -proto-plus==1.23.0 - # via feast (setup.py) protobuf==4.25.3 # via # feast (setup.py) # mypy-protobuf - # proto-plus -pyarrow==15.0.1 +pyarrow==15.0.2 # via feast (setup.py) pydantic==2.6.4 # via @@ -137,7 +123,7 @@ pyyaml==6.0.1 # dask # feast (setup.py) # uvicorn -referencing==0.33.0 +referencing==0.34.0 # via # jsonschema # jsonschema-specifications @@ -150,11 +136,11 @@ rpds-py==0.18.0 six==1.16.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) + # via + # feast (setup.py) + # sqlalchemy sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy starlette==0.36.3 @@ -202,7 +188,7 @@ watchfiles==0.21.0 # via uvicorn websockets==12.0 # via uvicorn -zipp==3.17.0 +zipp==3.18.1 # via # importlib-metadata # importlib-resources diff --git a/setup.py b/setup.py index 92d7356411..b32d03ed77 100644 --- a/setup.py +++ b/setup.py @@ -52,7 +52,6 @@ "pandas>=1.4.3,<3", # Higher than 4.23.4 seems to cause a seg fault "protobuf>=4.24.0,<5.0.0", - "proto-plus>=1.20.0,<2", "pyarrow>=4", "pydantic>=2.0.0", "pygments>=2.12.0,<3", @@ -70,8 +69,6 @@ # https://github.com/dask/dask/issues/10996 "dask>=2021.1.0,<2024.3.0", "bowler", # Needed for automatic repo upgrades - # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). - "httpx>=0.23.3", "importlib-resources>=6.0.0,<7", "importlib_metadata>=6.8.0,<7", ] @@ -163,11 +160,12 @@ "black>=22.6.0,<23", "isort>=5,<6", "grpcio-testing>=1.56.2,<2", + # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). + "httpx>=0.23.3", "minio==7.1.0", "mock==2.0.0", "moto<5", "mypy>=1.4.1", - "avro==1.10.0", "urllib3>=1.25.4,<3", "psutil==5.9.0", "py>=1.11.0", # https://github.com/pytest-dev/pytest/issues/10420 @@ -215,14 +213,8 @@ + GRPCIO_REQUIRED ) - -# rtd builds fail because of mysql not being installed in their environment. 
-# We can add mysql there, but it's not strictly needed. This will be faster for builds. -DOCS_REQUIRED = CI_REQUIRED.copy() -for _r in MYSQL_REQUIRED: - DOCS_REQUIRED.remove(_r) - -DEV_REQUIRED = ["grpcio-testing~=1.0"] + CI_REQUIRED +DOCS_REQUIRED = CI_REQUIRED +DEV_REQUIRED = CI_REQUIRED # Get git repo root directory repo_root = str(pathlib.Path(__file__).resolve().parent) From 6d9156b3d6372d654048ea2bfb7eec3f3908d038 Mon Sep 17 00:00:00 2001 From: brijesh-vora-sp <137945907+brijesh-vora-sp@users.noreply.github.com> Date: Wed, 20 Mar 2024 14:30:24 -0500 Subject: [PATCH 080/122] fix: Hashlib md5 errors in FIPS for python 3.9+ (#4019) * SAASMLOPS-1208 fix hashlib md5 errors in FIPS for python 3.9+ Signed-off-by: Brijesh Vora * SAASMLOPS-1208 drop the version check Signed-off-by: Brijesh Vora --------- Signed-off-by: Brijesh Vora --- sdk/python/feast/usage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py index 18bb497182..c6919eed4c 100644 --- a/sdk/python/feast/usage.py +++ b/sdk/python/feast/usage.py @@ -50,7 +50,8 @@ "env_signature": hashlib.md5( ",".join( sorted([k for k in os.environ.keys() if not k.startswith("FEAST")]) - ).encode() + ).encode(), + usedforsecurity=False, ).hexdigest(), } From d8d75676cbaf565b6a6a097f33c49f56b852dcd7 Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Thu, 21 Mar 2024 12:48:27 +0700 Subject: [PATCH 081/122] feat: Enable Arrow-based columnar data transfers (#3996) feat: Enable Arrow-based columnar data transfers Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- docs/reference/offline-stores/spark.md | 2 ++ .../feast/templates/spark/feature_repo/feature_store.yaml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/docs/reference/offline-stores/spark.md b/docs/reference/offline-stores/spark.md index 3cca2aab1a..2e2facba64 100644 --- a/docs/reference/offline-stores/spark.md +++ 
b/docs/reference/offline-stores/spark.md @@ -30,6 +30,8 @@ offline_store: spark.sql.catalogImplementation: "hive" spark.sql.parser.quotedRegexColumnNames: "true" spark.sql.session.timeZone: "UTC" + spark.sql.execution.arrow.fallback.enabled: "true" + spark.sql.execution.arrow.pyspark.enabled: "true" online_store: path: data/online_store.db ``` diff --git a/sdk/python/feast/templates/spark/feature_repo/feature_store.yaml b/sdk/python/feast/templates/spark/feature_repo/feature_store.yaml index f72c7c65f4..08383a29e1 100644 --- a/sdk/python/feast/templates/spark/feature_repo/feature_store.yaml +++ b/sdk/python/feast/templates/spark/feature_repo/feature_store.yaml @@ -12,6 +12,8 @@ offline_store: spark.sql.catalogImplementation: "hive" spark.sql.parser.quotedRegexColumnNames: "true" spark.sql.session.timeZone: "UTC" + spark.sql.execution.arrow.fallback.enabled: "true" + spark.sql.execution.arrow.pyspark.enabled: "true" online_store: path: data/online_store.db entity_key_serialization_version: 2 From dc4671ed7e28b4157112a81ee0a70925d02db8e8 Mon Sep 17 00:00:00 2001 From: Job Almekinders <55230856+job-almekinders@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:31:20 +0100 Subject: [PATCH 082/122] fix: Remove parentheses in pull_latest_from_table_or_query (#4026) Remove parentheses in pull_latest_from_table_or_query Signed-off-by: Job Almekinders --- .../offline_stores/contrib/postgres_offline_store/postgres.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index 9b300d7bf4..17a8e20f78 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -94,7 +94,7 @@ def pull_latest_from_table_or_query( FROM ( SELECT {a_field_string}, ROW_NUMBER() 
OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row - FROM ({from_expression}) a + FROM {from_expression} a WHERE a."{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz ) b WHERE _feast_row = 1 From e815562aad76490bab374623f2b9a921f448b9fc Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Thu, 21 Mar 2024 19:51:41 +0400 Subject: [PATCH 083/122] chore: Deprecate PostgreSQLRegistryStore (#4025) --- .../registry/contrib/postgres/postgres_registry_store.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py b/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py index 362ec9f485..877e0a018a 100644 --- a/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py +++ b/sdk/python/feast/infra/registry/contrib/postgres/postgres_registry_store.py @@ -1,3 +1,4 @@ +import warnings from typing import Optional import psycopg2 @@ -37,6 +38,11 @@ def __init__(self, config: PostgresRegistryConfig, registry_path: str): sslcert_path=getattr(config, "sslcert_path", None), sslrootcert_path=getattr(config, "sslrootcert_path", None), ) + warnings.warn( + "PostgreSQLRegistryStore is deprecated and will be removed in the future releases. 
Please use SqlRegistry instead.", + DeprecationWarning, + ) + self.table_name = config.path self.cache_ttl_seconds = config.cache_ttl_seconds From c58ef74c18554d823f7957bf602184c744bb7ed7 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Sun, 24 Mar 2024 13:24:23 -0400 Subject: [PATCH 084/122] feat: Updating protos to separate transformation (#4018) * feat: updating protos to separate transformation Signed-off-by: Francisco Javier Arceo * fixed stuff...i think Signed-off-by: Francisco Javier Arceo * updated tests and registry diff function Signed-off-by: Francisco Javier Arceo * updated base registry Signed-off-by: Francisco Javier Arceo * updated react component Signed-off-by: Francisco Javier Arceo * formatted Signed-off-by: Francisco Javier Arceo * updated stream feature view proto Signed-off-by: Francisco Javier Arceo * making the proto changes backwards compatable Signed-off-by: Francisco Javier Arceo * trying to make this backwards compatible Signed-off-by: Francisco Javier Arceo * caught a bug and fixed the linter * actually linted Signed-off-by: Francisco Javier Arceo * updated ui component Signed-off-by: Francisco Javier Arceo * accidentally commented out fixtures * Updated Signed-off-by: Francisco Javier Arceo * incrementing protos * updated tests Signed-off-by: Francisco Javier Arceo * fixed linting issue and made backwards compatible Signed-off-by: Francisco Javier Arceo * added more tests Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- protos/feast/core/OnDemandFeatureView.proto | 14 +- protos/feast/core/StreamFeatureView.proto | 7 +- protos/feast/core/Transformation.proto | 33 +++ sdk/python/feast/diff/registry_diff.py | 21 +- .../feast/infra/registry/base_registry.py | 14 +- sdk/python/feast/on_demand_feature_view.py | 54 +++- .../feast/on_demand_pandas_transformation.py | 4 +- .../on_demand_substrait_transformation.py | 4 +- sdk/python/feast/stream_feature_view.py | 33 ++- 
.../tests/unit/diff/test_registry_diff.py | 5 +- sdk/python/tests/unit/test_feature_views.py | 205 +------------- .../tests/unit/test_on_demand_feature_view.py | 66 +++++ .../tests/unit/test_stream_feature_view.py | 252 ++++++++++++++++++ .../OnDemandFeatureViewOverviewTab.tsx | 2 +- 14 files changed, 482 insertions(+), 232 deletions(-) create mode 100644 protos/feast/core/Transformation.proto create mode 100644 sdk/python/tests/unit/test_stream_feature_view.py diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index c43b33c1d2..cd3ceba150 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -27,6 +27,7 @@ import "feast/core/FeatureView.proto"; import "feast/core/FeatureViewProjection.proto"; import "feast/core/Feature.proto"; import "feast/core/DataSource.proto"; +import "feast/core/Transformation.proto"; message OnDemandFeatureView { // User-specified specifications of this feature view. @@ -49,9 +50,11 @@ message OnDemandFeatureViewSpec { map sources = 4; oneof transformation { - UserDefinedFunction user_defined_function = 5; - OnDemandSubstraitTransformation on_demand_substrait_transformation = 9; + UserDefinedFunction user_defined_function = 5 [deprecated = true]; + OnDemandSubstraitTransformation on_demand_substrait_transformation = 9 [deprecated = true]; } + // Oneof with {user_defined_function, on_demand_substrait_transformation} + FeatureTransformationV2 feature_transformation = 10; // Description of the on demand feature view. string description = 6; @@ -61,6 +64,7 @@ message OnDemandFeatureViewSpec { // Owner of the on demand feature view. string owner = 8; + string mode = 11; } message OnDemandFeatureViewMeta { @@ -81,6 +85,8 @@ message OnDemandSource { // Serialized representation of python function. 
message UserDefinedFunction { + option deprecated = true; + // The function name string name = 1; @@ -92,5 +98,7 @@ message UserDefinedFunction { } message OnDemandSubstraitTransformation { + option deprecated = true; + bytes substrait_plan = 1; -} \ No newline at end of file +} diff --git a/protos/feast/core/StreamFeatureView.proto b/protos/feast/core/StreamFeatureView.proto index 3181bdf360..cb7da0faf3 100644 --- a/protos/feast/core/StreamFeatureView.proto +++ b/protos/feast/core/StreamFeatureView.proto @@ -29,6 +29,7 @@ import "feast/core/FeatureView.proto"; import "feast/core/Feature.proto"; import "feast/core/DataSource.proto"; import "feast/core/Aggregation.proto"; +import "feast/core/Transformation.proto"; message StreamFeatureView { // User-specified specifications of this feature view. @@ -77,7 +78,8 @@ message StreamFeatureViewSpec { bool online = 12; // Serialized function that is encoded in the streamfeatureview - UserDefinedFunction user_defined_function = 13; + UserDefinedFunction user_defined_function = 13 [deprecated = true]; + // Mode of execution string mode = 14; @@ -87,5 +89,8 @@ message StreamFeatureViewSpec { // Timestamp field for aggregation string timestamp_field = 16; + + // Oneof with {user_defined_function, on_demand_substrait_transformation} + FeatureTransformationV2 feature_transformation = 17; } diff --git a/protos/feast/core/Transformation.proto b/protos/feast/core/Transformation.proto new file mode 100644 index 0000000000..cde2833fa4 --- /dev/null +++ b/protos/feast/core/Transformation.proto @@ -0,0 +1,33 @@ +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "FeatureTransformationProto"; +option java_package = "feast.proto.core"; + +import "google/protobuf/duration.proto"; + +// Serialized representation of python function. 
+message UserDefinedFunctionV2 { + // The function name + string name = 1; + + // The python-syntax function body (serialized by dill) + bytes body = 2; + + // The string representation of the udf + string body_text = 3; +} + +// A feature transformation executed as a user-defined function +message FeatureTransformationV2 { + // Note this Transformation starts at 5 for backwards compatibility + oneof transformation { + UserDefinedFunctionV2 user_defined_function = 1; + OnDemandSubstraitTransformationV2 on_demand_substrait_transformation = 2; + } +} + +message OnDemandSubstraitTransformationV2 { + bytes substrait_plan = 1; +} diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 15f880e392..120f5d697a 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -1,3 +1,4 @@ +import warnings from dataclasses import dataclass from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypeVar, cast @@ -144,11 +145,25 @@ def diff_registry_objects( if _field.name in FIELDS_TO_IGNORE: continue elif getattr(current_spec, _field.name) != getattr(new_spec, _field.name): - if _field.name == "user_defined_function": + # TODO: Delete "transformation" after we've safely deprecated it from the proto + if _field.name in ["transformation", "feature_transformation"]: + warnings.warn( + "transformation will be deprecated in the future please use feature_transformation instead.", + DeprecationWarning, + ) current_spec = cast(OnDemandFeatureViewSpec, current_spec) new_spec = cast(OnDemandFeatureViewSpec, new_spec) - current_udf = current_spec.user_defined_function - new_udf = new_spec.user_defined_function + # Check if the old proto is populated and use that if it is + deprecated_udf = current_spec.user_defined_function + feature_transformation_udf = ( + current_spec.feature_transformation.user_defined_function + ) + current_udf = ( + deprecated_udf + if deprecated_udf.body_text != "" + else 
feature_transformation_udf + ) + new_udf = new_spec.feature_transformation.user_defined_function for _udf_field in current_udf.DESCRIPTOR.fields: if _udf_field.name == "body": continue diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 9ee3bbbabc..d3d82a80b0 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import json +import warnings from abc import ABC, abstractmethod from collections import defaultdict from datetime import datetime @@ -662,10 +663,16 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: key=lambda on_demand_feature_view: on_demand_feature_view.name, ): odfv_dict = self._message_to_sorted_dict(on_demand_feature_view.to_proto()) - - odfv_dict["spec"]["userDefinedFunction"][ + # We are logging a warning because the registry object may be read from a proto that is not updated + # i.e., we have to submit dual writes but in order to ensure the read behavior succeeds we have to load + # both objects to compare any changes in the registry + warnings.warn( + "We will be deprecating the usage of spec.userDefinedFunction in a future release please upgrade cautiously.", + DeprecationWarning, + ) + odfv_dict["spec"]["featureTransformation"]["userDefinedFunction"][ "body" - ] = on_demand_feature_view.transformation.udf_string + ] = on_demand_feature_view.feature_transformation.udf_string registry_dict["onDemandFeatureViews"].append(odfv_dict) for request_feature_view in sorted( self.list_request_feature_views(project=project), @@ -684,6 +691,7 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: "body" ] = stream_feature_view.udf_string registry_dict["streamFeatureViews"].append(sfv_dict) + for saved_dataset in sorted( self.list_saved_datasets(project=project), key=lambda item: item.name ): 
diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 586286a3d4..61e55bb0c0 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -27,6 +27,12 @@ OnDemandFeatureViewSpec, OnDemandSource, ) +from feast.protos.feast.core.Transformation_pb2 import ( + FeatureTransformationV2 as FeatureTransformationProto, +) +from feast.protos.feast.core.Transformation_pb2 import ( + UserDefinedFunctionV2 as UserDefinedFunctionProto, +) from feast.type_map import ( feast_value_type_to_pandas_type, python_type_to_feast_value_type, @@ -63,6 +69,7 @@ class OnDemandFeatureView(BaseFeatureView): source_feature_view_projections: Dict[str, FeatureViewProjection] source_request_sources: Dict[str, RequestSource] transformation: Union[OnDemandPandasTransformation] + feature_transformation: Union[OnDemandPandasTransformation] description: str tags: Dict[str, str] owner: str @@ -83,6 +90,7 @@ def __init__( # noqa: C901 udf: Optional[FunctionType] = None, udf_string: str = "", transformation: Optional[Union[OnDemandPandasTransformation]] = None, + feature_transformation: Optional[Union[OnDemandPandasTransformation]] = None, description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", @@ -101,6 +109,7 @@ def __init__( # noqa: C901 dataframes as inputs. udf_string (deprecated): The source code version of the udf (for diffing and displaying in Web UI) transformation: The user defined transformation. + feature_transformation: The user defined transformation. description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. 
owner (optional): The owner of the on demand feature view, typically the email @@ -139,6 +148,7 @@ def __init__( # noqa: C901 ] = odfv_source.projection self.transformation = transformation + self.feature_transformation = self.transformation @property def proto_class(self) -> Type[OnDemandFeatureViewProto]: @@ -151,6 +161,7 @@ def __copy__(self): sources=list(self.source_feature_view_projections.values()) + list(self.source_request_sources.values()), transformation=self.transformation, + feature_transformation=self.transformation, description=self.description, tags=self.tags, owner=self.owner, @@ -172,6 +183,7 @@ def __eq__(self, other): != other.source_feature_view_projections or self.source_request_sources != other.source_request_sources or self.transformation != other.transformation + or self.feature_transformation != other.feature_transformation ): return False @@ -205,16 +217,19 @@ def to_proto(self) -> OnDemandFeatureViewProto: request_data_source=request_sources.to_proto() ) - spec = OnDemandFeatureViewSpec( - name=self.name, - features=[feature.to_proto() for feature in self.features], - sources=sources, + feature_transformation = FeatureTransformationProto( user_defined_function=self.transformation.to_proto() if type(self.transformation) == OnDemandPandasTransformation else None, - on_demand_substrait_transformation=self.transformation.to_proto() # type: ignore + on_demand_substrait_transformation=self.transformation.to_proto() if type(self.transformation) == OnDemandSubstraitTransformation - else None, + else None, # type: ignore + ) + spec = OnDemandFeatureViewSpec( + name=self.name, + features=[feature.to_proto() for feature in self.features], + sources=sources, + feature_transformation=feature_transformation, description=self.description, tags=self.tags, owner=self.owner, @@ -254,18 +269,37 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): ) if ( - on_demand_feature_view_proto.spec.WhichOneof("transformation") + 
on_demand_feature_view_proto.spec.feature_transformation.WhichOneof( + "transformation" + ) == "user_defined_function" + and on_demand_feature_view_proto.spec.feature_transformation.user_defined_function.body_text + != "" ): transformation = OnDemandPandasTransformation.from_proto( - on_demand_feature_view_proto.spec.user_defined_function + on_demand_feature_view_proto.spec.feature_transformation.user_defined_function ) elif ( - on_demand_feature_view_proto.spec.WhichOneof("transformation") + on_demand_feature_view_proto.spec.feature_transformation.WhichOneof( + "transformation" + ) == "on_demand_substrait_transformation" ): transformation = OnDemandSubstraitTransformation.from_proto( - on_demand_feature_view_proto.spec.on_demand_substrait_transformation + on_demand_feature_view_proto.spec.feature_transformation.on_demand_substrait_transformation + ) + elif ( + hasattr(on_demand_feature_view_proto.spec, "user_defined_function") + and on_demand_feature_view_proto.spec.feature_transformation.user_defined_function.body_text + == "" + ): + backwards_compatible_udf = UserDefinedFunctionProto( + name=on_demand_feature_view_proto.spec.user_defined_function.name, + body=on_demand_feature_view_proto.spec.user_defined_function.body, + body_text=on_demand_feature_view_proto.spec.user_defined_function.body_text, + ) + transformation = OnDemandPandasTransformation.from_proto( + user_defined_function_proto=backwards_compatible_udf, ) else: raise Exception("At least one transformation type needs to be provided") diff --git a/sdk/python/feast/on_demand_pandas_transformation.py b/sdk/python/feast/on_demand_pandas_transformation.py index 32cb44b429..48f5263051 100644 --- a/sdk/python/feast/on_demand_pandas_transformation.py +++ b/sdk/python/feast/on_demand_pandas_transformation.py @@ -3,8 +3,8 @@ import dill import pandas as pd -from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( - UserDefinedFunction as UserDefinedFunctionProto, +from 
feast.protos.feast.core.Transformation_pb2 import ( + UserDefinedFunctionV2 as UserDefinedFunctionProto, ) diff --git a/sdk/python/feast/on_demand_substrait_transformation.py b/sdk/python/feast/on_demand_substrait_transformation.py index 4e92e77dc8..0666739125 100644 --- a/sdk/python/feast/on_demand_substrait_transformation.py +++ b/sdk/python/feast/on_demand_substrait_transformation.py @@ -2,8 +2,8 @@ import pyarrow import pyarrow.substrait as substrait # type: ignore # noqa -from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( - OnDemandSubstraitTransformation as OnDemandSubstraitTransformationProto, +from feast.protos.feast.core.Transformation_pb2 import ( + OnDemandSubstraitTransformationV2 as OnDemandSubstraitTransformationProto, ) diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index 13abbc5e28..e8741a75fe 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -15,6 +15,7 @@ from feast.entity import Entity from feast.feature_view import FeatureView from feast.field import Field +from feast.on_demand_pandas_transformation import OnDemandPandasTransformation from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( UserDefinedFunction as UserDefinedFunctionProto, @@ -25,6 +26,12 @@ from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureViewSpec as StreamFeatureViewSpecProto, ) +from feast.protos.feast.core.Transformation_pb2 import ( + FeatureTransformationV2 as FeatureTransformationProto, +) +from feast.protos.feast.core.Transformation_pb2 import ( + UserDefinedFunctionV2 as UserDefinedFunctionProtoV2, +) warnings.simplefilter("once", RuntimeWarning) @@ -73,6 +80,7 @@ class StreamFeatureView(FeatureView): materialization_intervals: List[Tuple[datetime, datetime]] udf: Optional[FunctionType] udf_string: Optional[str] + feature_transformation: 
Optional[OnDemandPandasTransformation] def __init__( self, @@ -91,6 +99,7 @@ def __init__( timestamp_field: Optional[str] = "", udf: Optional[FunctionType] = None, udf_string: Optional[str] = "", + feature_transformation: Optional[Union[OnDemandPandasTransformation]] = None, ): if not flags_helper.is_test(): warnings.warn( @@ -118,6 +127,7 @@ def __init__( self.timestamp_field = timestamp_field or "" self.udf = udf self.udf_string = udf_string + self.feature_transformation = feature_transformation super().__init__( name=name, @@ -171,19 +181,30 @@ def to_proto(self): stream_source_proto = self.stream_source.to_proto() stream_source_proto.data_source_class_type = f"{self.stream_source.__class__.__module__}.{self.stream_source.__class__.__name__}" - udf_proto = None + udf_proto, feature_transformation = None, None if self.udf: udf_proto = UserDefinedFunctionProto( name=self.udf.__name__, body=dill.dumps(self.udf, recurse=True), body_text=self.udf_string, ) + udf_proto_v2 = UserDefinedFunctionProtoV2( + name=self.udf.__name__, + body=dill.dumps(self.udf, recurse=True), + body_text=self.udf_string, + ) + + feature_transformation = FeatureTransformationProto( + user_defined_function=udf_proto_v2, + ) + spec = StreamFeatureViewSpecProto( name=self.name, entities=self.entities, entity_columns=[field.to_proto() for field in self.entity_columns], features=[field.to_proto() for field in self.schema], user_defined_function=udf_proto, + feature_transformation=feature_transformation, description=self.description, tags=self.tags, owner=self.owner, @@ -220,6 +241,11 @@ def from_proto(cls, sfv_proto): if sfv_proto.spec.HasField("user_defined_function") else None ) + feature_transformation = ( + sfv_proto.spec.feature_transformation.user_defined_function.body_text + if sfv_proto.spec.HasField("feature_transformation") + else None + ) stream_feature_view = cls( name=sfv_proto.spec.name, description=sfv_proto.spec.description, @@ -238,6 +264,7 @@ def from_proto(cls, sfv_proto): 
mode=sfv_proto.spec.mode, udf=udf, udf_string=udf_string, + feature_transformation=feature_transformation, aggregations=[ Aggregation.from_proto(agg_proto) for agg_proto in sfv_proto.spec.aggregations @@ -294,6 +321,7 @@ def __copy__(self): timestamp_field=self.timestamp_field, source=self.stream_source if self.stream_source else self.batch_source, udf=self.udf, + feature_transformation=self.feature_transformation, ) fv.entities = self.entities fv.features = copy.copy(self.features) @@ -343,6 +371,9 @@ def decorator(user_function): schema=schema, udf=user_function, udf_string=udf_string, + feature_transformation=OnDemandPandasTransformation( + user_function, udf_string + ), description=description, tags=tags, online=online, diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index ce40295f8b..c209f1e0e0 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -137,13 +137,14 @@ def post_changed(inputs: pd.DataFrame) -> pd.DataFrame: # if no code is changed assert len(feast_object_diffs.feast_object_property_diffs) == 3 assert feast_object_diffs.feast_object_property_diffs[0].property_name == "name" + # Note we should only now be looking at changes for the feature_transformation field assert ( feast_object_diffs.feast_object_property_diffs[1].property_name - == "user_defined_function.name" + == "feature_transformation.name" ) assert ( feast_object_diffs.feast_object_property_diffs[2].property_name - == "user_defined_function.body_text" + == "feature_transformation.body_text" ) diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index 2ad9680703..0220d1a8a9 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -1,22 +1,16 @@ -import copy from datetime import timedelta import pytest from typeguard import TypeCheckError -from 
feast.aggregation import Aggregation from feast.batch_feature_view import BatchFeatureView from feast.data_format import AvroFormat -from feast.data_source import KafkaSource, PushSource +from feast.data_source import KafkaSource from feast.entity import Entity from feast.feature_view import FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource -from feast.protos.feast.core.StreamFeatureView_pb2 import ( - StreamFeatureView as StreamFeatureViewProto, -) from feast.protos.feast.types.Value_pb2 import ValueType -from feast.stream_feature_view import StreamFeatureView, stream_feature_view from feast.types import Float32 @@ -65,169 +59,10 @@ def test_create_batch_feature_view(): ) -def test_create_stream_feature_view(): - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - StreamFeatureView( - name="test kafka stream feature view", - entities=[], - ttl=timedelta(days=30), - source=stream_source, - aggregations=[], - ) - - push_source = PushSource( - name="push source", batch_source=FileSource(path="some path") - ) - StreamFeatureView( - name="test push source feature view", - entities=[], - ttl=timedelta(days=30), - source=push_source, - aggregations=[], - ) - - with pytest.raises(TypeError): - StreamFeatureView( - name="test batch feature view", - entities=[], - ttl=timedelta(days=30), - aggregations=[], - ) - - with pytest.raises(ValueError): - StreamFeatureView( - name="test batch feature view", - entities=[], - ttl=timedelta(days=30), - source=FileSource(path="some path"), - aggregations=[], - ) - - def simple_udf(x: int): return x + 3 -def test_stream_feature_view_serialization(): - entity = Entity(name="driver_entity", join_keys=["test_key"]) - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - 
message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - - sfv = StreamFeatureView( - name="test kafka stream feature view", - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ) - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - udf=simple_udf, - tags={}, - ) - - sfv_proto = sfv.to_proto() - - new_sfv = StreamFeatureView.from_proto(sfv_proto=sfv_proto) - assert new_sfv == sfv - - -def test_stream_feature_view_udfs(): - entity = Entity(name="driver_entity", join_keys=["test_key"]) - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - - @stream_feature_view( - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ) - ], - timestamp_field="event_timestamp", - source=stream_source, - ) - def pandas_udf(pandas_df): - import pandas as pd - - assert type(pandas_df) == pd.DataFrame - df = pandas_df.transform(lambda x: x + 10, axis=1) - return df - - import pandas as pd - - df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) - sfv = pandas_udf - sfv_proto = sfv.to_proto() - new_sfv = StreamFeatureView.from_proto(sfv_proto) - new_df = new_sfv.udf(df) - - expected_df = pd.DataFrame({"A": [11, 12, 13], "B": [20, 30, 40]}) - - assert new_df.equals(expected_df) - - -def test_stream_feature_view_initialization_with_optional_fields_omitted(): - entity = Entity(name="driver_entity", join_keys=["test_key"]) - 
stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - - sfv = StreamFeatureView( - name="test kafka stream feature view", - entities=[entity], - schema=[], - description="desc", - timestamp_field="event_timestamp", - source=stream_source, - tags={}, - ) - sfv_proto = sfv.to_proto() - - new_sfv = StreamFeatureView.from_proto(sfv_proto=sfv_proto) - assert new_sfv == sfv - - def test_hash(): file_source = FileSource(name="my-file-source", path="test.parquet") feature_view_1 = FeatureView( @@ -282,41 +117,3 @@ def test_hash(): def test_field_types(): with pytest.raises(TypeCheckError): Field(name="name", dtype=ValueType.INT32) - - -def test_stream_feature_view_proto_type(): - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - sfv = StreamFeatureView( - name="test stream featureview proto class", - entities=[], - ttl=timedelta(days=30), - source=stream_source, - aggregations=[], - ) - assert sfv.proto_class is StreamFeatureViewProto - - -def test_stream_feature_view_copy(): - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - ) - sfv = StreamFeatureView( - name="test stream featureview proto class", - entities=[], - ttl=timedelta(days=30), - source=stream_source, - aggregations=[], - ) - assert sfv == copy.copy(sfv) diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index 66d02c65d1..b83449519f 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -129,3 
+129,69 @@ def test_hash(): assert on_demand_feature_view_5.transformation == OnDemandPandasTransformation( udf2, "udf2 source code" ) + assert ( + on_demand_feature_view_5.feature_transformation + == on_demand_feature_view_5.transformation + ) + + +@pytest.mark.filterwarnings("ignore:udf and udf_string parameters are deprecated") +def test_from_proto_backwards_compatable_udf(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + sources = [feature_view] + on_demand_feature_view = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + transformation=OnDemandPandasTransformation( + udf=udf1, udf_string="udf1 source code" + ), + ) + + # We need a proto with the "udf1 source code" in the user_defined_function.body_text + # and to populate it in feature_transformation + proto = on_demand_feature_view.to_proto() + assert ( + on_demand_feature_view.transformation.udf_string + == proto.spec.feature_transformation.user_defined_function.body_text + ) + # Because of the current set of code this is just confirming it is empty + assert proto.spec.user_defined_function.body_text == "" + assert proto.spec.user_defined_function.body == b"" + assert proto.spec.user_defined_function.name == "" + + # Assuming we pull it from the registry we set it to the feature_transformation proto values + proto.spec.user_defined_function.name = ( + proto.spec.feature_transformation.user_defined_function.name + ) + proto.spec.user_defined_function.body = ( + proto.spec.feature_transformation.user_defined_function.body + ) + proto.spec.user_defined_function.body_text = ( + proto.spec.feature_transformation.user_defined_function.body_text + ) + + # And now we're going to 
null the feature_transformation proto object before reserializing the entire proto + # proto.spec.user_defined_function.body_text = on_demand_feature_view.transformation.udf_string + proto.spec.feature_transformation.user_defined_function.name = "" + proto.spec.feature_transformation.user_defined_function.body = b"" + proto.spec.feature_transformation.user_defined_function.body_text = "" + + # And now we expect the to get the same object back under feature_transformation + reserialized_proto = OnDemandFeatureView.from_proto(proto) + assert ( + reserialized_proto.feature_transformation.udf_string + == on_demand_feature_view.feature_transformation.udf_string + ) diff --git a/sdk/python/tests/unit/test_stream_feature_view.py b/sdk/python/tests/unit/test_stream_feature_view.py new file mode 100644 index 0000000000..b53f9a593a --- /dev/null +++ b/sdk/python/tests/unit/test_stream_feature_view.py @@ -0,0 +1,252 @@ +import copy +from datetime import timedelta + +import pytest + +from feast.aggregation import Aggregation +from feast.batch_feature_view import BatchFeatureView +from feast.data_format import AvroFormat +from feast.data_source import KafkaSource, PushSource +from feast.entity import Entity +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.protos.feast.core.StreamFeatureView_pb2 import ( + StreamFeatureView as StreamFeatureViewProto, +) +from feast.stream_feature_view import StreamFeatureView, stream_feature_view +from feast.types import Float32 + + +def test_create_batch_feature_view(): + batch_source = FileSource(path="some path") + BatchFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=batch_source, + ) + + with pytest.raises(TypeError): + BatchFeatureView( + name="test batch feature view", entities=[], ttl=timedelta(days=30) + ) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + 
message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + with pytest.raises(ValueError): + BatchFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + ) + + +def test_create_stream_feature_view(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + StreamFeatureView( + name="test kafka stream feature view", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + aggregations=[], + ) + + push_source = PushSource( + name="push source", batch_source=FileSource(path="some path") + ) + StreamFeatureView( + name="test push source feature view", + entities=[], + ttl=timedelta(days=30), + source=push_source, + aggregations=[], + ) + + with pytest.raises(TypeError): + StreamFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + aggregations=[], + ) + + with pytest.raises(ValueError): + StreamFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=FileSource(path="some path"), + aggregations=[], + ) + + +def simple_udf(x: int): + return x + 3 + + +def test_stream_feature_view_serialization(): + entity = Entity(name="driver_entity", join_keys=["test_key"]) + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + + sfv = StreamFeatureView( + name="test kafka stream feature view", + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ) + ], + 
timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + udf=simple_udf, + tags={}, + ) + + sfv_proto = sfv.to_proto() + + new_sfv = StreamFeatureView.from_proto(sfv_proto=sfv_proto) + assert new_sfv == sfv + assert ( + sfv_proto.spec.feature_transformation.user_defined_function.name == "simple_udf" + ) + + +def test_stream_feature_view_udfs(): + entity = Entity(name="driver_entity", join_keys=["test_key"]) + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + + @stream_feature_view( + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ) + ], + timestamp_field="event_timestamp", + source=stream_source, + ) + def pandas_udf(pandas_df): + import pandas as pd + + assert type(pandas_df) == pd.DataFrame + df = pandas_df.transform(lambda x: x + 10, axis=1) + return df + + import pandas as pd + + df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) + sfv = pandas_udf + sfv_proto = sfv.to_proto() + new_sfv = StreamFeatureView.from_proto(sfv_proto) + new_df = new_sfv.udf(df) + + expected_df = pd.DataFrame({"A": [11, 12, 13], "B": [20, 30, 40]}) + + assert new_df.equals(expected_df) + + +def test_stream_feature_view_initialization_with_optional_fields_omitted(): + entity = Entity(name="driver_entity", join_keys=["test_key"]) + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + + sfv = StreamFeatureView( + name="test kafka stream feature view", + entities=[entity], + schema=[], + description="desc", + 
timestamp_field="event_timestamp", + source=stream_source, + tags={}, + ) + sfv_proto = sfv.to_proto() + + new_sfv = StreamFeatureView.from_proto(sfv_proto=sfv_proto) + assert new_sfv == sfv + + +def test_stream_feature_view_proto_type(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + sfv = StreamFeatureView( + name="test stream featureview proto class", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + aggregations=[], + ) + assert sfv.proto_class is StreamFeatureViewProto + + +def test_stream_feature_view_copy(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + sfv = StreamFeatureView( + name="test stream featureview proto class", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + aggregations=[], + ) + assert sfv == copy.copy(sfv) diff --git a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx index ee8e41bbf6..aac3f6ac5b 100644 --- a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx +++ b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx @@ -57,7 +57,7 @@ const OnDemandFeatureViewOverviewTab = ({ - {data?.spec?.userDefinedFunction?.bodyText} + {data?.spec?.featureTransformation?.userDefinedFunction?.bodyText} From 9b98eafccbf39b41186bfb3ebd36af20d57bd509 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Mon, 25 Mar 2024 14:03:27 -0400 Subject: [PATCH 085/122] feat: Rename OnDemandTransformations to Transformations (#4038) * feat: updating protos to separate transformation Signed-off-by: Francisco Javier Arceo * fixed stuff...i think Signed-off-by: Francisco Javier Arceo * updated tests and 
registry diff function Signed-off-by: Francisco Javier Arceo * updated base registry Signed-off-by: Francisco Javier Arceo * updated react component Signed-off-by: Francisco Javier Arceo * formatted Signed-off-by: Francisco Javier Arceo * updated stream feature view proto Signed-off-by: Francisco Javier Arceo * making the proto changes backwards compatable Signed-off-by: Francisco Javier Arceo * trying to make this backwards compatible Signed-off-by: Francisco Javier Arceo * caught a bug and fixed the linter Signed-off-by: Francisco Javier Arceo * actually linted Signed-off-by: Francisco Javier Arceo * updated ui component Signed-off-by: Francisco Javier Arceo * accidentally commented out fixtures Signed-off-by: Francisco Javier Arceo * Updated Signed-off-by: Francisco Javier Arceo * incrementing protos Signed-off-by: Francisco Javier Arceo * updated tests Signed-off-by: Francisco Javier Arceo * fixed linting issue and made backwards compatible Signed-off-by: Francisco Javier Arceo * feat: Renaming OnDemandTransformations to Transformations Signed-off-by: Francisco Javier Arceo * updated proto name Signed-off-by: Francisco Javier Arceo * renamed substrait proto Signed-off-by: Francisco Javier Arceo * renamed substrait proto Signed-off-by: Francisco Javier Arceo * updated * updated Signed-off-by: Francisco Javier Arceo * updated integration test * missed one Signed-off-by: Francisco Javier Arceo * updated to include Substrait type * linter Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- protos/feast/core/OnDemandFeatureView.proto | 12 +--- protos/feast/core/Transformation.proto | 5 +- sdk/python/feast/diff/registry_diff.py | 18 +++--- .../feast/infra/registry/base_registry.py | 33 ++++++++-- sdk/python/feast/on_demand_feature_view.py | 62 +++++++++---------- sdk/python/feast/stream_feature_view.py | 10 ++- sdk/python/feast/transformation/__init__.py | 0 .../pandas_transformation.py} | 8 +-- .../substrait_transformation.py} | 
20 +++--- .../feature_repos/universal/feature_views.py | 6 +- .../tests/unit/test_on_demand_feature_view.py | 25 +++----- 11 files changed, 102 insertions(+), 97 deletions(-) create mode 100644 sdk/python/feast/transformation/__init__.py rename sdk/python/feast/{on_demand_pandas_transformation.py => transformation/pandas_transformation.py} (85%) rename sdk/python/feast/{on_demand_substrait_transformation.py => transformation/substrait_transformation.py} (57%) diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index cd3ceba150..7a5fec1650 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -49,10 +49,8 @@ message OnDemandFeatureViewSpec { // Map of sources for this feature view. map sources = 4; - oneof transformation { - UserDefinedFunction user_defined_function = 5 [deprecated = true]; - OnDemandSubstraitTransformation on_demand_substrait_transformation = 9 [deprecated = true]; - } + UserDefinedFunction user_defined_function = 5 [deprecated = true]; + // Oneof with {user_defined_function, on_demand_substrait_transformation} FeatureTransformationV2 feature_transformation = 10; @@ -96,9 +94,3 @@ message UserDefinedFunction { // The string representation of the udf string body_text = 3; } - -message OnDemandSubstraitTransformation { - option deprecated = true; - - bytes substrait_plan = 1; -} diff --git a/protos/feast/core/Transformation.proto b/protos/feast/core/Transformation.proto index cde2833fa4..36f1e691fe 100644 --- a/protos/feast/core/Transformation.proto +++ b/protos/feast/core/Transformation.proto @@ -21,13 +21,12 @@ message UserDefinedFunctionV2 { // A feature transformation executed as a user-defined function message FeatureTransformationV2 { - // Note this Transformation starts at 5 for backwards compatibility oneof transformation { UserDefinedFunctionV2 user_defined_function = 1; - OnDemandSubstraitTransformationV2 
on_demand_substrait_transformation = 2; + SubstraitTransformationV2 substrait_transformation = 2; } } -message OnDemandSubstraitTransformationV2 { +message SubstraitTransformationV2 { bytes substrait_plan = 1; } diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 120f5d697a..41b2142226 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -1,4 +1,3 @@ -import warnings from dataclasses import dataclass from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypeVar, cast @@ -145,22 +144,23 @@ def diff_registry_objects( if _field.name in FIELDS_TO_IGNORE: continue elif getattr(current_spec, _field.name) != getattr(new_spec, _field.name): - # TODO: Delete "transformation" after we've safely deprecated it from the proto - if _field.name in ["transformation", "feature_transformation"]: - warnings.warn( - "transformation will be deprecated in the future please use feature_transformation instead.", - DeprecationWarning, - ) + if _field.name == "feature_transformation": current_spec = cast(OnDemandFeatureViewSpec, current_spec) new_spec = cast(OnDemandFeatureViewSpec, new_spec) # Check if the old proto is populated and use that if it is - deprecated_udf = current_spec.user_defined_function feature_transformation_udf = ( current_spec.feature_transformation.user_defined_function ) + if ( + current_spec.HasField("user_defined_function") + and not feature_transformation_udf + ): + deprecated_udf = current_spec.user_defined_function + else: + deprecated_udf = None current_udf = ( deprecated_udf - if deprecated_udf.body_text != "" + if deprecated_udf is not None else feature_transformation_udf ) new_udf = new_spec.feature_transformation.user_defined_function diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index d3d82a80b0..583206941e 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ 
b/sdk/python/feast/infra/registry/base_registry.py @@ -33,6 +33,8 @@ from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView +from feast.transformation.pandas_transformation import PandasTransformation +from feast.transformation.substrait_transformation import SubstraitTransformation class BaseRegistry(ABC): @@ -670,10 +672,33 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: "We will be deprecating the usage of spec.userDefinedFunction in a future release please upgrade cautiously.", DeprecationWarning, ) - odfv_dict["spec"]["featureTransformation"]["userDefinedFunction"][ - "body" - ] = on_demand_feature_view.feature_transformation.udf_string - registry_dict["onDemandFeatureViews"].append(odfv_dict) + if on_demand_feature_view.feature_transformation: + if isinstance( + on_demand_feature_view.feature_transformation, PandasTransformation + ): + if "userDefinedFunction" not in odfv_dict["spec"]: + odfv_dict["spec"]["userDefinedFunction"] = {} + odfv_dict["spec"]["userDefinedFunction"][ + "body" + ] = on_demand_feature_view.feature_transformation.udf_string + odfv_dict["spec"]["featureTransformation"]["userDefinedFunction"][ + "body" + ] = on_demand_feature_view.feature_transformation.udf_string + elif isinstance( + on_demand_feature_view.feature_transformation, + SubstraitTransformation, + ): + odfv_dict["spec"]["featureTransformation"]["substraitPlan"][ + "body" + ] = on_demand_feature_view.feature_transformation.substrait_plan + else: + odfv_dict["spec"]["featureTransformation"]["userDefinedFunction"][ + "body" + ] = None + odfv_dict["spec"]["featureTransformation"]["substraitPlan"][ + "body" + ] = None + registry_dict["onDemandFeatureViews"].append(odfv_dict) for request_feature_view in sorted( self.list_request_feature_views(project=project), key=lambda request_feature_view: request_feature_view.name, diff --git 
a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 61e55bb0c0..ce416fff2a 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -17,8 +17,6 @@ from feast.feature_view import FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type -from feast.on_demand_pandas_transformation import OnDemandPandasTransformation -from feast.on_demand_substrait_transformation import OnDemandSubstraitTransformation from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) @@ -33,6 +31,8 @@ from feast.protos.feast.core.Transformation_pb2 import ( UserDefinedFunctionV2 as UserDefinedFunctionProto, ) +from feast.transformation.pandas_transformation import PandasTransformation +from feast.transformation.substrait_transformation import SubstraitTransformation from feast.type_map import ( feast_value_type_to_pandas_type, python_type_to_feast_value_type, @@ -57,7 +57,7 @@ class OnDemandFeatureView(BaseFeatureView): sources with type FeatureViewProjection. source_request_sources: A map from input source names to the actual input sources with type RequestSource. - transformation: The user defined transformation. + feature_transformation: The user defined transformation. description: A human-readable description. tags: A dictionary of key-value pairs to store arbitrary metadata. 
owner: The owner of the on demand feature view, typically the email of the primary @@ -68,8 +68,7 @@ class OnDemandFeatureView(BaseFeatureView): features: List[Field] source_feature_view_projections: Dict[str, FeatureViewProjection] source_request_sources: Dict[str, RequestSource] - transformation: Union[OnDemandPandasTransformation] - feature_transformation: Union[OnDemandPandasTransformation] + feature_transformation: Union[PandasTransformation, SubstraitTransformation] description: str tags: Dict[str, str] owner: str @@ -89,8 +88,9 @@ def __init__( # noqa: C901 ], udf: Optional[FunctionType] = None, udf_string: str = "", - transformation: Optional[Union[OnDemandPandasTransformation]] = None, - feature_transformation: Optional[Union[OnDemandPandasTransformation]] = None, + feature_transformation: Optional[ + Union[PandasTransformation, SubstraitTransformation] + ] = None, description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", @@ -108,7 +108,6 @@ def __init__( # noqa: C901 udf (deprecated): The user defined transformation function, which must take pandas dataframes as inputs. udf_string (deprecated): The source code version of the udf (for diffing and displaying in Web UI) - transformation: The user defined transformation. feature_transformation: The user defined transformation. description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. @@ -123,13 +122,13 @@ def __init__( # noqa: C901 owner=owner, ) - if not transformation: + if not feature_transformation: if udf: warnings.warn( "udf and udf_string parameters are deprecated. 
Please use transformation=OnDemandPandasTransformation(udf, udf_string) instead.", DeprecationWarning, ) - transformation = OnDemandPandasTransformation(udf, udf_string) + feature_transformation = PandasTransformation(udf, udf_string) else: raise Exception( "OnDemandFeatureView needs to be initialized with either transformation or udf arguments" @@ -147,8 +146,7 @@ def __init__( # noqa: C901 odfv_source.name ] = odfv_source.projection - self.transformation = transformation - self.feature_transformation = self.transformation + self.feature_transformation = feature_transformation @property def proto_class(self) -> Type[OnDemandFeatureViewProto]: @@ -160,8 +158,7 @@ def __copy__(self): schema=self.features, sources=list(self.source_feature_view_projections.values()) + list(self.source_request_sources.values()), - transformation=self.transformation, - feature_transformation=self.transformation, + feature_transformation=self.feature_transformation, description=self.description, tags=self.tags, owner=self.owner, @@ -182,7 +179,6 @@ def __eq__(self, other): self.source_feature_view_projections != other.source_feature_view_projections or self.source_request_sources != other.source_request_sources - or self.transformation != other.transformation or self.feature_transformation != other.feature_transformation ): return False @@ -218,12 +214,12 @@ def to_proto(self) -> OnDemandFeatureViewProto: ) feature_transformation = FeatureTransformationProto( - user_defined_function=self.transformation.to_proto() - if type(self.transformation) == OnDemandPandasTransformation + user_defined_function=self.feature_transformation.to_proto() + if isinstance(self.feature_transformation, PandasTransformation) + else None, + substrait_transformation=self.feature_transformation.to_proto() + if isinstance(self.feature_transformation, SubstraitTransformation) else None, - on_demand_substrait_transformation=self.transformation.to_proto() - if type(self.transformation) == 
OnDemandSubstraitTransformation - else None, # type: ignore ) spec = OnDemandFeatureViewSpec( name=self.name, @@ -276,17 +272,17 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): and on_demand_feature_view_proto.spec.feature_transformation.user_defined_function.body_text != "" ): - transformation = OnDemandPandasTransformation.from_proto( + transformation = PandasTransformation.from_proto( on_demand_feature_view_proto.spec.feature_transformation.user_defined_function ) elif ( on_demand_feature_view_proto.spec.feature_transformation.WhichOneof( "transformation" ) - == "on_demand_substrait_transformation" + == "substrait_transformation" ): - transformation = OnDemandSubstraitTransformation.from_proto( - on_demand_feature_view_proto.spec.feature_transformation.on_demand_substrait_transformation + transformation = SubstraitTransformation.from_proto( + on_demand_feature_view_proto.spec.feature_transformation.substrait_transformation ) elif ( hasattr(on_demand_feature_view_proto.spec, "user_defined_function") @@ -298,7 +294,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): body=on_demand_feature_view_proto.spec.user_defined_function.body, body_text=on_demand_feature_view_proto.spec.user_defined_function.body_text, ) - transformation = OnDemandPandasTransformation.from_proto( + transformation = PandasTransformation.from_proto( user_defined_function_proto=backwards_compatible_udf, ) else: @@ -314,7 +310,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): for feature in on_demand_feature_view_proto.spec.features ], sources=sources, - transformation=transformation, + feature_transformation=transformation, description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, @@ -374,7 +370,9 @@ def get_transformed_features_df( # Compute transformed values and apply to each result row - 
df_with_transformed_features = self.transformation.transform(df_with_features) + df_with_transformed_features = self.feature_transformation.transform( + df_with_features + ) # Work out whether the correct columns names are used. rename_columns: Dict[str, str] = {} @@ -424,7 +422,7 @@ def infer_features(self) -> None: dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) sample_val = rand_df_value[dtype] if dtype in rand_df_value else None df[f"{field.name}"] = pd.Series(sample_val, dtype=dtype) - output_df: pd.DataFrame = self.transformation.transform(df) + output_df: pd.DataFrame = self.feature_transformation.transform(df) inferred_features = [] for f, dt in zip(output_df.columns, output_df.dtypes): inferred_features.append( @@ -521,7 +519,7 @@ def decorator(user_function): input_fields: Field = [] for s in sources: - if type(s) == FeatureView: + if isinstance(s, FeatureView): fields = s.projection.features else: fields = s.features @@ -540,19 +538,19 @@ def decorator(user_function): expr = user_function(ibis.table(input_fields, "t")) - transformation = OnDemandSubstraitTransformation( + transformation = SubstraitTransformation( substrait_plan=compiler.compile(expr).SerializeToString() ) else: udf_string = dill.source.getsource(user_function) mainify(user_function) - transformation = OnDemandPandasTransformation(user_function, udf_string) + transformation = PandasTransformation(user_function, udf_string) on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, sources=sources, schema=schema, - transformation=transformation, + feature_transformation=transformation, description=description, tags=tags, owner=owner, diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index e8741a75fe..0d1125d2bd 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -15,7 +15,6 @@ from feast.entity import Entity from feast.feature_view import FeatureView from 
feast.field import Field -from feast.on_demand_pandas_transformation import OnDemandPandasTransformation from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( UserDefinedFunction as UserDefinedFunctionProto, @@ -32,6 +31,7 @@ from feast.protos.feast.core.Transformation_pb2 import ( UserDefinedFunctionV2 as UserDefinedFunctionProtoV2, ) +from feast.transformation.pandas_transformation import PandasTransformation warnings.simplefilter("once", RuntimeWarning) @@ -80,7 +80,7 @@ class StreamFeatureView(FeatureView): materialization_intervals: List[Tuple[datetime, datetime]] udf: Optional[FunctionType] udf_string: Optional[str] - feature_transformation: Optional[OnDemandPandasTransformation] + feature_transformation: Optional[PandasTransformation] def __init__( self, @@ -99,7 +99,7 @@ def __init__( timestamp_field: Optional[str] = "", udf: Optional[FunctionType] = None, udf_string: Optional[str] = "", - feature_transformation: Optional[Union[OnDemandPandasTransformation]] = None, + feature_transformation: Optional[Union[PandasTransformation]] = None, ): if not flags_helper.is_test(): warnings.warn( @@ -371,9 +371,7 @@ def decorator(user_function): schema=schema, udf=user_function, udf_string=udf_string, - feature_transformation=OnDemandPandasTransformation( - user_function, udf_string - ), + feature_transformation=PandasTransformation(user_function, udf_string), description=description, tags=tags, online=online, diff --git a/sdk/python/feast/transformation/__init__.py b/sdk/python/feast/transformation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/on_demand_pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py similarity index 85% rename from sdk/python/feast/on_demand_pandas_transformation.py rename to sdk/python/feast/transformation/pandas_transformation.py index 48f5263051..76f17e2106 100644 --- 
a/sdk/python/feast/on_demand_pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -8,7 +8,7 @@ ) -class OnDemandPandasTransformation: +class PandasTransformation: def __init__(self, udf: FunctionType, udf_string: str = ""): """ Creates an OnDemandPandasTransformation object. @@ -25,9 +25,9 @@ def transform(self, df: pd.DataFrame) -> pd.DataFrame: return self.udf.__call__(df) def __eq__(self, other): - if not isinstance(other, OnDemandPandasTransformation): + if not isinstance(other, PandasTransformation): raise TypeError( - "Comparisons should only involve OnDemandPandasTransformation class objects." + "Comparisons should only involve PandasTransformation class objects." ) if ( @@ -47,7 +47,7 @@ def to_proto(self) -> UserDefinedFunctionProto: @classmethod def from_proto(cls, user_defined_function_proto: UserDefinedFunctionProto): - return OnDemandPandasTransformation( + return PandasTransformation( udf=dill.loads(user_defined_function_proto.body), udf_string=user_defined_function_proto.body_text, ) diff --git a/sdk/python/feast/on_demand_substrait_transformation.py b/sdk/python/feast/transformation/substrait_transformation.py similarity index 57% rename from sdk/python/feast/on_demand_substrait_transformation.py rename to sdk/python/feast/transformation/substrait_transformation.py index 0666739125..b3dbe7a4b4 100644 --- a/sdk/python/feast/on_demand_substrait_transformation.py +++ b/sdk/python/feast/transformation/substrait_transformation.py @@ -3,14 +3,14 @@ import pyarrow.substrait as substrait # type: ignore # noqa from feast.protos.feast.core.Transformation_pb2 import ( - OnDemandSubstraitTransformationV2 as OnDemandSubstraitTransformationProto, + SubstraitTransformationV2 as SubstraitTransformationProto, ) -class OnDemandSubstraitTransformation: +class SubstraitTransformation: def __init__(self, substrait_plan: bytes): """ - Creates an OnDemandSubstraitTransformation object. + Creates an SubstraitTransformation object. 
Args: substrait_plan: The user-provided substrait plan. @@ -27,9 +27,9 @@ def table_provider(names, schema: pyarrow.Schema): return table.to_pandas() def __eq__(self, other): - if not isinstance(other, OnDemandSubstraitTransformation): + if not isinstance(other, SubstraitTransformation): raise TypeError( - "Comparisons should only involve OnDemandSubstraitTransformation class objects." + "Comparisons should only involve SubstraitTransformation class objects." ) if not super().__eq__(other): @@ -37,14 +37,14 @@ def __eq__(self, other): return self.substrait_plan == other.substrait_plan - def to_proto(self) -> OnDemandSubstraitTransformationProto: - return OnDemandSubstraitTransformationProto(substrait_plan=self.substrait_plan) + def to_proto(self) -> SubstraitTransformationProto: + return SubstraitTransformationProto(substrait_plan=self.substrait_plan) @classmethod def from_proto( cls, - on_demand_substrait_transformation_proto: OnDemandSubstraitTransformationProto, + substrait_transformation_proto: SubstraitTransformationProto, ): - return OnDemandSubstraitTransformation( - substrait_plan=on_demand_substrait_transformation_proto.substrait_plan + return SubstraitTransformation( + substrait_plan=substrait_transformation_proto.substrait_plan ) diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 421ef41601..55d2ed8425 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -15,7 +15,7 @@ ) from feast.data_source import DataSource, RequestSource from feast.feature_view_projection import FeatureViewProjection -from feast.on_demand_feature_view import OnDemandPandasTransformation +from feast.on_demand_feature_view import PandasTransformation from feast.types import Array, FeastType, Float32, Float64, Int32, Int64 from 
tests.integration.feature_repos.universal.entities import ( customer, @@ -71,7 +71,7 @@ def conv_rate_plus_100_feature_view( name=conv_rate_plus_100.__name__, schema=[] if infer_features else _features, sources=sources, - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=conv_rate_plus_100, udf_string="raw udf source" ), ) @@ -110,7 +110,7 @@ def similarity_feature_view( name=similarity.__name__, sources=sources, schema=[] if infer_features else _fields, - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=similarity, udf_string="similarity raw udf" ), ) diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index b83449519f..d561bd8e84 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -18,10 +18,7 @@ from feast.feature_view import FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource -from feast.on_demand_feature_view import ( - OnDemandFeatureView, - OnDemandPandasTransformation, -) +from feast.on_demand_feature_view import OnDemandFeatureView, PandasTransformation from feast.types import Float32 @@ -59,7 +56,7 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=udf1, udf_string="udf1 source code" ), ) @@ -70,7 +67,7 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=udf1, udf_string="udf1 source code" ), ) @@ -81,7 +78,7 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - transformation=OnDemandPandasTransformation( + 
feature_transformation=PandasTransformation( udf=udf2, udf_string="udf2 source code" ), ) @@ -92,7 +89,7 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=udf2, udf_string="udf2 source code" ), description="test", @@ -126,17 +123,13 @@ def test_hash(): } assert len(s4) == 3 - assert on_demand_feature_view_5.transformation == OnDemandPandasTransformation( + assert on_demand_feature_view_5.feature_transformation == PandasTransformation( udf2, "udf2 source code" ) - assert ( - on_demand_feature_view_5.feature_transformation - == on_demand_feature_view_5.transformation - ) @pytest.mark.filterwarnings("ignore:udf and udf_string parameters are deprecated") -def test_from_proto_backwards_compatable_udf(): +def test_from_proto_backwards_compatible_udf(): file_source = FileSource(name="my-file-source", path="test.parquet") feature_view = FeatureView( name="my-feature-view", @@ -155,7 +148,7 @@ def test_from_proto_backwards_compatable_udf(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - transformation=OnDemandPandasTransformation( + feature_transformation=PandasTransformation( udf=udf1, udf_string="udf1 source code" ), ) @@ -164,7 +157,7 @@ def test_from_proto_backwards_compatable_udf(): # and to populate it in feature_transformation proto = on_demand_feature_view.to_proto() assert ( - on_demand_feature_view.transformation.udf_string + on_demand_feature_view.feature_transformation.udf_string == proto.spec.feature_transformation.user_defined_function.body_text ) # Because of the current set of code this is just confirming it is empty From e703b40582e676d4ec92551e79a444a9c0949f66 Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Tue, 26 Mar 2024 09:33:58 +0700 Subject: [PATCH 086/122] fix: Add __eq__, __hash__ to SparkSource for correct comparison 
(#4028) * feat: Enable Arrow-based columnar data transfers Signed-off-by: tanlocnguyen * fix: Add __eq__, __hash__ to SparkSource for comparision Signed-off-by: tanlocnguyen * chore: simplify the logic Signed-off-by: tanlocnguyen --------- Signed-off-by: tanlocnguyen Co-authored-by: tanlocnguyen --- .../contrib/spark_offline_store/spark_source.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index 8cd392ce5d..0809043a01 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -185,6 +185,19 @@ def get_table_query_string(self) -> str: return f"`{tmp_table_name}`" + def __eq__(self, other): + base_eq = super().__eq__(other) + if not base_eq: + return False + return ( + self.table == other.table + and self.query == other.query + and self.path == other.path + ) + + def __hash__(self): + return super().__hash__() + class SparkOptions: allowed_formats = [format.value for format in SparkSourceFormat] From cf58ebed02eaa8f8da234f9446277bbbb5195681 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 26 Mar 2024 06:34:28 +0400 Subject: [PATCH 087/122] chore: Remove all usage of `RequestFeatureView` (#4039) remove all usage of RequestFeatureViews Signed-off-by: tokoko --- protos/feast/core/Registry.proto | 2 - protos/feast/core/RequestFeatureView.proto | 51 ------ protos/feast/registry/RegistryServer.proto | 22 --- .../docs/source/feast.protos.feast.core.rst | 16 -- sdk/python/docs/source/feast.rst | 8 - sdk/python/feast/__init__.py | 2 - sdk/python/feast/cli.py | 1 - sdk/python/feast/diff/registry_diff.py | 6 - sdk/python/feast/feast_object.py | 4 - sdk/python/feast/feature_store.py | 165 ++---------------- .../feast/infra/registry/base_registry.py | 43 ----- 
.../feast/infra/registry/caching_registry.py | 29 --- .../infra/registry/proto_registry_utils.py | 24 --- sdk/python/feast/infra/registry/registry.py | 45 +---- sdk/python/feast/infra/registry/remote.py | 26 --- sdk/python/feast/infra/registry/snowflake.py | 48 +---- sdk/python/feast/infra/registry/sql.py | 45 +---- .../registry/snowflake_table_creation.sql | 9 - .../registry/snowflake_table_deletion.sql | 2 - sdk/python/feast/registry_server.py | 17 -- sdk/python/feast/repo_contents.py | 5 - sdk/python/feast/repo_operations.py | 8 - sdk/python/feast/request_feature_view.py | 137 --------------- 23 files changed, 19 insertions(+), 696 deletions(-) delete mode 100644 protos/feast/core/RequestFeatureView.proto delete mode 100644 sdk/python/feast/request_feature_view.py diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto index 7d80d8c837..0c3f8a53f9 100644 --- a/protos/feast/core/Registry.proto +++ b/protos/feast/core/Registry.proto @@ -27,7 +27,6 @@ import "feast/core/FeatureTable.proto"; import "feast/core/FeatureView.proto"; import "feast/core/InfraObject.proto"; import "feast/core/OnDemandFeatureView.proto"; -import "feast/core/RequestFeatureView.proto"; import "feast/core/StreamFeatureView.proto"; import "feast/core/DataSource.proto"; import "feast/core/SavedDataset.proto"; @@ -41,7 +40,6 @@ message Registry { repeated FeatureView feature_views = 6; repeated DataSource data_sources = 12; repeated OnDemandFeatureView on_demand_feature_views = 8; - repeated RequestFeatureView request_feature_views = 9; repeated StreamFeatureView stream_feature_views = 14; repeated FeatureService feature_services = 7; repeated SavedDataset saved_datasets = 11; diff --git a/protos/feast/core/RequestFeatureView.proto b/protos/feast/core/RequestFeatureView.proto deleted file mode 100644 index 4049053c2b..0000000000 --- a/protos/feast/core/RequestFeatureView.proto +++ /dev/null @@ -1,51 +0,0 @@ -// -// Copyright 2021 The Feast Authors -// -// Licensed under 
the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - - -syntax = "proto3"; -package feast.core; - -option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; -option java_outer_classname = "RequestFeatureViewProto"; -option java_package = "feast.proto.core"; - -import "feast/core/DataSource.proto"; - -message RequestFeatureView { - // User-specified specifications of this feature view. - RequestFeatureViewSpec spec = 1; -} - -// Next available id: 7 -message RequestFeatureViewSpec { - // Name of the feature view. Must be unique. Not updated. - string name = 1; - - // Name of Feast project that this feature view belongs to. - string project = 2; - - // Request data which contains the underlying data schema and list of associated features - DataSource request_data_source = 3; - - // Description of the request feature view. - string description = 4; - - // User defined metadata. - map tags = 5; - - // Owner of the request feature view. 
- string owner = 6; -} diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto index ab324f9bd1..e99987eb2d 100644 --- a/protos/feast/registry/RegistryServer.proto +++ b/protos/feast/registry/RegistryServer.proto @@ -7,7 +7,6 @@ import "feast/core/Registry.proto"; import "feast/core/Entity.proto"; import "feast/core/DataSource.proto"; import "feast/core/FeatureView.proto"; -import "feast/core/RequestFeatureView.proto"; import "feast/core/StreamFeatureView.proto"; import "feast/core/OnDemandFeatureView.proto"; import "feast/core/FeatureService.proto"; @@ -28,10 +27,6 @@ service RegistryServer{ rpc GetFeatureView (GetFeatureViewRequest) returns (feast.core.FeatureView) {} rpc ListFeatureViews (ListFeatureViewsRequest) returns (ListFeatureViewsResponse) {} - // RequestFeatureView RPCs - rpc GetRequestFeatureView (GetRequestFeatureViewRequest) returns (feast.core.RequestFeatureView) {} - rpc ListRequestFeatureViews (ListRequestFeatureViewsRequest) returns (ListRequestFeatureViewsResponse) {} - // StreamFeatureView RPCs rpc GetStreamFeatureView (GetStreamFeatureViewRequest) returns (feast.core.StreamFeatureView) {} rpc ListStreamFeatureViews (ListStreamFeatureViewsRequest) returns (ListStreamFeatureViewsResponse) {} @@ -126,23 +121,6 @@ message ListFeatureViewsResponse { repeated feast.core.FeatureView feature_views = 1; } -// RequestFeatureView - -message GetRequestFeatureViewRequest { - string name = 1; - string project = 2; - bool allow_cache = 3; -} - -message ListRequestFeatureViewsRequest { - string project = 1; - bool allow_cache = 2; -} - -message ListRequestFeatureViewsResponse { - repeated feast.core.RequestFeatureView request_feature_views = 1; -} - // StreamFeatureView message GetStreamFeatureViewRequest { diff --git a/sdk/python/docs/source/feast.protos.feast.core.rst b/sdk/python/docs/source/feast.protos.feast.core.rst index aaed49cd73..5da16d2a26 100644 --- a/sdk/python/docs/source/feast.protos.feast.core.rst 
+++ b/sdk/python/docs/source/feast.protos.feast.core.rst @@ -228,22 +228,6 @@ feast.protos.feast.core.Registry\_pb2\_grpc module :undoc-members: :show-inheritance: -feast.protos.feast.core.RequestFeatureView\_pb2 module ------------------------------------------------------- - -.. automodule:: feast.protos.feast.core.RequestFeatureView_pb2 - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.core.RequestFeatureView\_pb2\_grpc module ------------------------------------------------------------- - -.. automodule:: feast.protos.feast.core.RequestFeatureView_pb2_grpc - :members: - :undoc-members: - :show-inheritance: - feast.protos.feast.core.SavedDataset\_pb2 module ------------------------------------------------ diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index b0ed92c4cc..abb8783bf0 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -273,14 +273,6 @@ feast.repo\_upgrade module :undoc-members: :show-inheritance: -feast.request\_feature\_view module ------------------------------------ - -.. 
automodule:: feast.request_feature_view - :members: - :undoc-members: - :show-inheritance: - feast.saved\_dataset module --------------------------- diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index d043f1a973..3eff91d65f 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -22,7 +22,6 @@ from .field import Field from .on_demand_feature_view import OnDemandFeatureView from .repo_config import RepoConfig -from .request_feature_view import RequestFeatureView from .stream_feature_view import StreamFeatureView from .value_type import ValueType @@ -49,7 +48,6 @@ "BigQuerySource", "FileSource", "RedshiftSource", - "RequestFeatureView", "SnowflakeSource", "PushSource", "RequestSource", diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 7ce8aaef2b..7673eee20d 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -381,7 +381,6 @@ def feature_view_list(ctx: click.Context): table = [] for feature_view in [ *store.list_feature_views(), - *store.list_request_feature_views(), *store.list_on_demand_feature_views(), ]: entities = set() diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 41b2142226..106d34bf48 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -20,9 +20,6 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec -from feast.protos.feast.core.RequestFeatureView_pb2 import ( - RequestFeatureView as RequestFeatureViewProto, -) from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, ) @@ -110,7 +107,6 @@ def tag_objects_for_keep_delete_update_add( FeatureViewProto, FeatureServiceProto, OnDemandFeatureViewProto, - RequestFeatureViewProto, StreamFeatureViewProto, ValidationReferenceProto, ) @@ -339,7 +335,6 @@ def apply_diff_to_registry( elif 
feast_object_diff.feast_object_type in [ FeastObjectType.FEATURE_VIEW, FeastObjectType.ON_DEMAND_FEATURE_VIEW, - FeastObjectType.REQUEST_FEATURE_VIEW, FeastObjectType.STREAM_FEATURE_VIEW, ]: feature_view_obj = cast( @@ -383,7 +378,6 @@ def apply_diff_to_registry( elif feast_object_diff.feast_object_type in [ FeastObjectType.FEATURE_VIEW, FeastObjectType.ON_DEMAND_FEATURE_VIEW, - FeastObjectType.REQUEST_FEATURE_VIEW, FeastObjectType.STREAM_FEATURE_VIEW, ]: registry.apply_feature_view( diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py index 7cccf26455..2d06d8d669 100644 --- a/sdk/python/feast/feast_object.py +++ b/sdk/python/feast/feast_object.py @@ -11,12 +11,10 @@ from .protos.feast.core.FeatureService_pb2 import FeatureServiceSpec from .protos.feast.core.FeatureView_pb2 import FeatureViewSpec from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec -from .protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec from .protos.feast.core.StreamFeatureView_pb2 import StreamFeatureViewSpec from .protos.feast.core.ValidationProfile_pb2 import ( ValidationReference as ValidationReferenceProto, ) -from .request_feature_view import RequestFeatureView from .saved_dataset import ValidationReference from .stream_feature_view import StreamFeatureView @@ -24,7 +22,6 @@ FeastObject = Union[ FeatureView, OnDemandFeatureView, - RequestFeatureView, BatchFeatureView, StreamFeatureView, Entity, @@ -36,7 +33,6 @@ FeastObjectSpecProto = Union[ FeatureViewSpec, OnDemandFeatureViewSpec, - RequestFeatureViewSpec, StreamFeatureViewSpec, EntitySpecV2, FeatureServiceSpec, diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 44236248fe..9ac2c14527 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -91,7 +91,6 @@ from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value from feast.repo_config import RepoConfig, load_repo_config from 
feast.repo_contents import RepoContents -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, SavedDatasetStorage, ValidationReference from feast.stream_feature_view import StreamFeatureView from feast.type_map import python_values_to_proto_values @@ -266,23 +265,6 @@ def list_feature_views(self, allow_cache: bool = False) -> List[FeatureView]: """ return self._list_feature_views(allow_cache) - @log_exceptions_and_usage - def list_request_feature_views( - self, allow_cache: bool = False - ) -> List[RequestFeatureView]: - """ - Retrieves the list of feature views from the registry. - - Args: - allow_cache: Whether to allow returning entities from a cached registry. - - Returns: - A list of feature views. - """ - return self._registry.list_request_feature_views( - self.project, allow_cache=allow_cache - ) - def _list_feature_views( self, allow_cache: bool = False, @@ -562,7 +544,6 @@ def _validate_all_feature_views( self, views_to_update: List[FeatureView], odfvs_to_update: List[OnDemandFeatureView], - request_views_to_update: List[RequestFeatureView], sfvs_to_update: List[StreamFeatureView], ): """Validates all feature views.""" @@ -577,7 +558,6 @@ def _validate_all_feature_views( [ *views_to_update, *odfvs_to_update, - *request_views_to_update, *sfvs_to_update, ] ) @@ -716,7 +696,6 @@ def plan( ... feature_views=[driver_hourly_stats_view], ... on_demand_feature_views=list(), ... stream_feature_views=list(), - ... request_feature_views=list(), ... entities=[driver], ... 
feature_services=list())) # register entity and feature view """ @@ -724,7 +703,6 @@ def plan( self._validate_all_feature_views( desired_repo_contents.feature_views, desired_repo_contents.on_demand_feature_views, - desired_repo_contents.request_feature_views, desired_repo_contents.stream_feature_views, ) _validate_data_sources(desired_repo_contents.data_sources) @@ -781,7 +759,6 @@ def apply( Entity, FeatureView, OnDemandFeatureView, - RequestFeatureView, BatchFeatureView, StreamFeatureView, FeatureService, @@ -848,9 +825,6 @@ def apply( ) ] sfvs_to_update = [ob for ob in objects if isinstance(ob, StreamFeatureView)] - request_views_to_update = [ - ob for ob in objects if isinstance(ob, RequestFeatureView) - ] odfvs_to_update = [ob for ob in objects if isinstance(ob, OnDemandFeatureView)] services_to_update = [ob for ob in objects if isinstance(ob, FeatureService)] data_sources_set_to_update = { @@ -877,16 +851,6 @@ def apply( if fv.stream_source: data_sources_set_to_update.add(fv.stream_source) - if request_views_to_update: - warnings.warn( - "Request feature view is deprecated. " - "Please use request data source instead", - DeprecationWarning, - ) - - for rfv in request_views_to_update: - data_sources_set_to_update.add(rfv.request_source) - for odfv in odfvs_to_update: for v in odfv.source_request_sources.values(): data_sources_set_to_update.add(v) @@ -898,7 +862,7 @@ def apply( # Validate all feature views and make inferences. self._validate_all_feature_views( - views_to_update, odfvs_to_update, request_views_to_update, sfvs_to_update + views_to_update, odfvs_to_update, sfvs_to_update ) self._make_inferences( data_sources_to_update, @@ -912,9 +876,7 @@ def apply( # Add all objects to the registry and update the provider's infrastructure. 
for ds in data_sources_to_update: self._registry.apply_data_source(ds, project=self.project, commit=False) - for view in itertools.chain( - views_to_update, odfvs_to_update, request_views_to_update, sfvs_to_update - ): + for view in itertools.chain(views_to_update, odfvs_to_update, sfvs_to_update): self._registry.apply_feature_view(view, project=self.project, commit=False) for ent in entities_to_update: self._registry.apply_entity(ent, project=self.project, commit=False) @@ -943,9 +905,6 @@ def apply( and not isinstance(ob, StreamFeatureView) ) ] - request_views_to_delete = [ - ob for ob in objects_to_delete if isinstance(ob, RequestFeatureView) - ] odfvs_to_delete = [ ob for ob in objects_to_delete if isinstance(ob, OnDemandFeatureView) ] @@ -974,10 +933,6 @@ def apply( self._registry.delete_feature_view( view.name, project=self.project, commit=False ) - for request_view in request_views_to_delete: - self._registry.delete_feature_view( - request_view.name, project=self.project, commit=False - ) for odfv in odfvs_to_delete: self._registry.delete_feature_view( odfv.name, project=self.project, commit=False @@ -1088,43 +1043,26 @@ def get_historical_features( _feature_refs = self._get_features(features) ( all_feature_views, - all_request_feature_views, all_on_demand_feature_views, ) = self._get_feature_views_to_use(features) - if all_request_feature_views: - warnings.warn( - "Request feature view is deprecated. " - "Please use request data source instead", - DeprecationWarning, - ) - # TODO(achal): _group_feature_refs returns the on demand feature views, but it's not passed into the provider. # This is a weird interface quirk - we should revisit the `get_historical_features` to # pass in the on demand feature views as well. 
- fvs, odfvs, request_fvs, request_fv_refs = _group_feature_refs( + fvs, odfvs = _group_feature_refs( _feature_refs, all_feature_views, - all_request_feature_views, all_on_demand_feature_views, ) feature_views = list(view for view, _ in fvs) on_demand_feature_views = list(view for view, _ in odfvs) - request_feature_views = list(view for view, _ in request_fvs) set_usage_attribute("odfv", bool(on_demand_feature_views)) - set_usage_attribute("request_fv", bool(request_feature_views)) # Check that the right request data is present in the entity_df if type(entity_df) == pd.DataFrame: if self.config.coerce_tz_aware: entity_df = utils.make_df_tzaware(cast(pd.DataFrame, entity_df)) - for fv in request_feature_views: - for feature in fv.features: - if feature.name not in entity_df.columns: - raise RequestDataNotFoundInEntityDfException( - feature_name=feature.name, feature_view_name=fv.name - ) for odfv in on_demand_feature_views: odfv_request_data_schema = odfv.get_request_data_schema() for feature_name in odfv_request_data_schema.keys(): @@ -1135,9 +1073,6 @@ def get_historical_features( ) _validate_feature_refs(_feature_refs, full_feature_names) - # Drop refs that refer to RequestFeatureViews since they don't need to be fetched and - # already exist in the entity_df - _feature_refs = [ref for ref in _feature_refs if ref not in request_fv_refs] provider = self._get_provider() job = provider.get_historical_features( @@ -1615,19 +1550,11 @@ def _get_online_features( _feature_refs = self._get_features(features, allow_cache=True) ( requested_feature_views, - requested_request_feature_views, requested_on_demand_feature_views, ) = self._get_feature_views_to_use( features=features, allow_cache=True, hide_dummy_entity=False ) - if requested_request_feature_views: - warnings.warn( - "Request feature view is deprecated. 
" - "Please use request data source instead", - DeprecationWarning, - ) - ( entity_name_to_join_key_map, entity_type_map, @@ -1648,19 +1575,12 @@ def _get_online_features( num_rows = _validate_entity_values(entity_proto_values) _validate_feature_refs(_feature_refs, full_feature_names) - ( - grouped_refs, - grouped_odfv_refs, - grouped_request_fv_refs, - _, - ) = _group_feature_refs( + (grouped_refs, grouped_odfv_refs,) = _group_feature_refs( _feature_refs, requested_feature_views, - requested_request_feature_views, requested_on_demand_feature_views, ) set_usage_attribute("odfv", bool(grouped_odfv_refs)) - set_usage_attribute("request_fv", bool(grouped_request_fv_refs)) # All requested features should be present in the result. requested_result_row_names = { @@ -1673,23 +1593,14 @@ def _get_online_features( feature_views = list(view for view, _ in grouped_refs) - needed_request_data, needed_request_fv_features = self.get_needed_request_data( - grouped_odfv_refs, grouped_request_fv_refs - ) + needed_request_data = self.get_needed_request_data(grouped_odfv_refs) join_key_values: Dict[str, List[Value]] = {} request_data_features: Dict[str, List[Value]] = {} # Entity rows may be either entities or request data. for join_key_or_entity_name, values in entity_proto_values.items(): # Found request data - if ( - join_key_or_entity_name in needed_request_data - or join_key_or_entity_name in needed_request_fv_features - ): - if join_key_or_entity_name in needed_request_fv_features: - # If the data was requested as a feature then - # make sure it appears in the result. 
- requested_result_row_names.add(join_key_or_entity_name) + if join_key_or_entity_name in needed_request_data: request_data_features[join_key_or_entity_name] = values else: if join_key_or_entity_name in join_keys_set: @@ -1711,7 +1622,7 @@ def _get_online_features( join_key_values[join_key] = values self.ensure_request_data_values_exist( - needed_request_data, needed_request_fv_features, request_data_features + needed_request_data, request_data_features ) # Populate online features response proto with join keys and request data features @@ -1870,33 +1781,21 @@ def _populate_result_rows_from_columnar( @staticmethod def get_needed_request_data( grouped_odfv_refs: List[Tuple[OnDemandFeatureView, List[str]]], - grouped_request_fv_refs: List[Tuple[RequestFeatureView, List[str]]], - ) -> Tuple[Set[str], Set[str]]: + ) -> Set[str]: needed_request_data: Set[str] = set() - needed_request_fv_features: Set[str] = set() for odfv, _ in grouped_odfv_refs: odfv_request_data_schema = odfv.get_request_data_schema() needed_request_data.update(odfv_request_data_schema.keys()) - for request_fv, _ in grouped_request_fv_refs: - for feature in request_fv.features: - needed_request_fv_features.add(feature.name) - return needed_request_data, needed_request_fv_features + return needed_request_data @staticmethod def ensure_request_data_values_exist( needed_request_data: Set[str], - needed_request_fv_features: Set[str], request_data_features: Dict[str, List[Any]], ): - if len(needed_request_data) + len(needed_request_fv_features) != len( - request_data_features.keys() - ): + if len(needed_request_data) != len(request_data_features.keys()): missing_features = [ - x - for x in itertools.chain( - needed_request_data, needed_request_fv_features - ) - if x not in request_data_features + x for x in needed_request_data if x not in request_data_features ] raise RequestDataNotFoundInEntityRowsException( feature_names=missing_features @@ -2161,7 +2060,7 @@ def _get_feature_views_to_use( features: 
Optional[Union[List[str], FeatureService]], allow_cache=False, hide_dummy_entity: bool = True, - ) -> Tuple[List[FeatureView], List[RequestFeatureView], List[OnDemandFeatureView]]: + ) -> Tuple[List[FeatureView], List[OnDemandFeatureView]]: fvs = { fv.name: fv for fv in [ @@ -2172,13 +2071,6 @@ def _get_feature_views_to_use( ] } - request_fvs = { - fv.name: fv - for fv in self._registry.list_request_feature_views( - project=self.project, allow_cache=allow_cache - ) - } - od_fvs = { fv.name: fv for fv in self._registry.list_on_demand_feature_views( @@ -2187,7 +2079,7 @@ def _get_feature_views_to_use( } if isinstance(features, FeatureService): - fvs_to_use, request_fvs_to_use, od_fvs_to_use = [], [], [] + fvs_to_use, od_fvs_to_use = [], [] for fv_name, projection in [ (projection.name, projection) for projection in features.feature_view_projections @@ -2196,10 +2088,6 @@ def _get_feature_views_to_use( fvs_to_use.append( fvs[fv_name].with_projection(copy.copy(projection)) ) - elif fv_name in request_fvs: - request_fvs_to_use.append( - request_fvs[fv_name].with_projection(copy.copy(projection)) - ) elif fv_name in od_fvs: odfv = od_fvs[fv_name].with_projection(copy.copy(projection)) od_fvs_to_use.append(odfv) @@ -2214,11 +2102,10 @@ def _get_feature_views_to_use( f"{fv_name} which doesn't exist. Please make sure that you have created the feature view" f'{fv_name} and that you have registered it by running "apply".' 
) - views_to_use = (fvs_to_use, request_fvs_to_use, od_fvs_to_use) + views_to_use = (fvs_to_use, od_fvs_to_use) else: views_to_use = ( [*fvs.values()], - [*request_fvs.values()], [*od_fvs.values()], ) @@ -2456,24 +2343,15 @@ def _validate_feature_refs(feature_refs: List[str], full_feature_names: bool = F def _group_feature_refs( features: List[str], all_feature_views: List[FeatureView], - all_request_feature_views: List[RequestFeatureView], all_on_demand_feature_views: List[OnDemandFeatureView], ) -> Tuple[ - List[Tuple[FeatureView, List[str]]], - List[Tuple[OnDemandFeatureView, List[str]]], - List[Tuple[RequestFeatureView, List[str]]], - Set[str], + List[Tuple[FeatureView, List[str]]], List[Tuple[OnDemandFeatureView, List[str]]] ]: """Get list of feature views and corresponding feature names based on feature references""" # view name to view proto view_index = {view.projection.name_to_use(): view for view in all_feature_views} - # request view name to proto - request_view_index = { - view.projection.name_to_use(): view for view in all_request_feature_views - } - # on demand view to on demand view proto on_demand_view_index = { view.projection.name_to_use(): view for view in all_on_demand_feature_views @@ -2481,8 +2359,6 @@ def _group_feature_refs( # view name to feature names views_features = defaultdict(set) - request_views_features = defaultdict(set) - request_view_refs = set() # on demand view name to feature names on_demand_view_features = defaultdict(set) @@ -2503,26 +2379,17 @@ def _group_feature_refs( ].source_feature_view_projections.values(): for input_feat in input_fv_projection.features: views_features[input_fv_projection.name].add(input_feat.name) - elif view_name in request_view_index: - request_view_index[view_name].projection.get_feature( - feat_name - ) # For validation - request_views_features[view_name].add(feat_name) - request_view_refs.add(ref) else: raise FeatureViewNotFoundException(view_name) fvs_result: List[Tuple[FeatureView, List[str]]] = 
[] odfvs_result: List[Tuple[OnDemandFeatureView, List[str]]] = [] - request_fvs_result: List[Tuple[RequestFeatureView, List[str]]] = [] for view_name, feature_names in views_features.items(): fvs_result.append((view_index[view_name], list(feature_names))) - for view_name, feature_names in request_views_features.items(): - request_fvs_result.append((request_view_index[view_name], list(feature_names))) for view_name, feature_names in on_demand_view_features.items(): odfvs_result.append((on_demand_view_index[view_name], list(feature_names))) - return fvs_result, odfvs_result, request_fvs_result, request_view_refs + return fvs_result, odfvs_result def _print_materialization_log( diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 583206941e..c874001c89 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -30,7 +30,6 @@ from feast.on_demand_feature_view import OnDemandFeatureView from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView from feast.transformation.pandas_transformation import PandasTransformation @@ -350,41 +349,6 @@ def list_feature_views( """ raise NotImplementedError - # request feature view operations - @abstractmethod - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ) -> RequestFeatureView: - """ - Retrieves a request feature view. 
- - Args: - name: Name of request feature view - project: Feast project that this feature view belongs to - allow_cache: Allow returning feature view from the cached registry - - Returns: - Returns either the specified feature view, or raises an exception if - none is found - """ - raise NotImplementedError - - @abstractmethod - def list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - """ - Retrieve a list of request feature views from the registry - - Args: - allow_cache: Allow returning feature views from the cached registry - project: Filter feature views based on project name - - Returns: - List of request feature views - """ - raise NotImplementedError - @abstractmethod def apply_materialization( self, @@ -699,13 +663,6 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: "body" ] = None registry_dict["onDemandFeatureViews"].append(odfv_dict) - for request_feature_view in sorted( - self.list_request_feature_views(project=project), - key=lambda request_feature_view: request_feature_view.name, - ): - registry_dict["requestFeatureViews"].append( - self._message_to_sorted_dict(request_feature_view.to_proto()) - ) for stream_feature_view in sorted( self.list_stream_feature_views(project=project), key=lambda stream_feature_view: stream_feature_view.name, diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index 4c408b0a46..3101b073d5 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -14,7 +14,6 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.project_metadata import ProjectMetadata -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -145,34 +144,6 
@@ def list_on_demand_feature_views( ) return self._list_on_demand_feature_views(project) - @abstractmethod - def _get_request_feature_view(self, name: str, project: str) -> RequestFeatureView: - pass - - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ) -> RequestFeatureView: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.get_request_feature_view( - self.cached_registry_proto, name, project - ) - return self._get_request_feature_view(name, project) - - @abstractmethod - def _list_request_feature_views(self, project: str) -> List[RequestFeatureView]: - pass - - def list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_request_feature_views( - self.cached_registry_proto, project - ) - return self._list_request_feature_views(project) - @abstractmethod def _get_stream_feature_view(self, name: str, project: str) -> StreamFeatureView: pass diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py index e93f513b69..4d2e16cb02 100644 --- a/sdk/python/feast/infra/registry/proto_registry_utils.py +++ b/sdk/python/feast/infra/registry/proto_registry_utils.py @@ -19,7 +19,6 @@ from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -99,16 +98,6 @@ def get_stream_feature_view( raise FeatureViewNotFoundException(name, project) -def get_request_feature_view(registry_proto: RegistryProto, name: str, project: str): - for 
feature_view_proto in registry_proto.feature_views: - if ( - feature_view_proto.spec.name == name - and feature_view_proto.spec.project == project - ): - return RequestFeatureView.from_proto(feature_view_proto) - raise FeatureViewNotFoundException(name, project) - - def get_on_demand_feature_view( registry_proto: RegistryProto, name: str, project: str ) -> OnDemandFeatureView: @@ -180,19 +169,6 @@ def list_feature_views( return feature_views -@registry_proto_cache -def list_request_feature_views( - registry_proto: RegistryProto, project: str -) -> List[RequestFeatureView]: - feature_views: List[RequestFeatureView] = [] - for request_feature_view_proto in registry_proto.request_feature_views: - if request_feature_view_proto.spec.project == project: - feature_views.append( - RequestFeatureView.from_proto(request_feature_view_proto) - ) - return feature_views - - @registry_proto_cache def list_stream_feature_views( registry_proto: RegistryProto, project: str diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index a9d6c44f38..3c9843c904 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -46,7 +46,6 @@ from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.repo_config import RegistryConfig from feast.repo_contents import RepoContents -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -73,7 +72,6 @@ class FeastObjectType(Enum): ENTITY = "entity" FEATURE_VIEW = "feature view" ON_DEMAND_FEATURE_VIEW = "on demand feature view" - REQUEST_FEATURE_VIEW = "request feature view" STREAM_FEATURE_VIEW = "stream feature view" FEATURE_SERVICE = "feature service" @@ -88,9 +86,6 @@ def get_objects_from_registry( FeastObjectType.ON_DEMAND_FEATURE_VIEW: registry.list_on_demand_feature_views( project=project ), - 
FeastObjectType.REQUEST_FEATURE_VIEW: registry.list_request_feature_views( - project=project - ), FeastObjectType.STREAM_FEATURE_VIEW: registry.list_stream_feature_views( project=project, ), @@ -108,7 +103,6 @@ def get_objects_from_repo_contents( FeastObjectType.ENTITY: repo_contents.entities, FeastObjectType.FEATURE_VIEW: repo_contents.feature_views, FeastObjectType.ON_DEMAND_FEATURE_VIEW: repo_contents.on_demand_feature_views, - FeastObjectType.REQUEST_FEATURE_VIEW: repo_contents.request_feature_views, FeastObjectType.STREAM_FEATURE_VIEW: repo_contents.stream_feature_views, FeastObjectType.FEATURE_SERVICE: repo_contents.feature_services, } @@ -402,10 +396,6 @@ def apply_feature_view( existing_feature_views_of_same_type = ( self.cached_registry_proto.on_demand_feature_views ) - elif isinstance(feature_view, RequestFeatureView): - existing_feature_views_of_same_type = ( - self.cached_registry_proto.request_feature_views - ) else: raise ValueError(f"Unexpected feature view type: {type(feature_view)}") @@ -532,24 +522,6 @@ def list_feature_views( ) return proto_registry_utils.list_feature_views(registry_proto, project) - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ): - registry_proto = self._get_registry_proto( - project=project, allow_cache=allow_cache - ) - return proto_registry_utils.get_request_feature_view( - registry_proto, name, project - ) - - def list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - registry_proto = self._get_registry_proto( - project=project, allow_cache=allow_cache - ) - return proto_registry_utils.list_request_feature_views(registry_proto, project) - def get_feature_view( self, name: str, project: str, allow_cache: bool = False ) -> FeatureView: @@ -601,18 +573,6 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): self.commit() return - for idx, existing_request_feature_view_proto in enumerate( - 
self.cached_registry_proto.request_feature_views - ): - if ( - existing_request_feature_view_proto.spec.name == name - and existing_request_feature_view_proto.spec.project == project - ): - del self.cached_registry_proto.request_feature_views[idx] - if commit: - self.commit() - return - for idx, existing_on_demand_feature_view_proto in enumerate( self.cached_registry_proto.on_demand_feature_views ): @@ -890,10 +850,7 @@ def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]: for fv in self.cached_registry_proto.on_demand_feature_views } fvs = {fv.spec.name: fv for fv in self.cached_registry_proto.feature_views} - request_fvs = { - fv.spec.name: fv for fv in self.cached_registry_proto.request_feature_views - } sfv = { fv.spec.name: fv for fv in self.cached_registry_proto.stream_feature_views } - return {**odfvs, **fvs, **request_fvs, **sfv} + return {**odfvs, **fvs, **sfv} diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py index 67d61ffec7..f93e1ab1c0 100644 --- a/sdk/python/feast/infra/registry/remote.py +++ b/sdk/python/feast/infra/registry/remote.py @@ -19,7 +19,6 @@ from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.repo_config import RegistryConfig -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -215,31 +214,6 @@ def list_feature_views( for feature_view in response.feature_views ] - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ) -> RequestFeatureView: - request = RegistryServer_pb2.GetRequestFeatureViewRequest( - name=name, project=project, allow_cache=allow_cache - ) - - response = self.stub.GetRequestFeatureView(request) - - return RequestFeatureView.from_proto(response) - - def 
list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - request = RegistryServer_pb2.ListRequestFeatureViewsRequest( - project=project, allow_cache=allow_cache - ) - - response = self.stub.ListRequestFeatureViews(request) - - return [ - RequestFeatureView.from_proto(request_feature_view) - for request_feature_view in response.request_feature_views - ] - def apply_materialization( self, feature_view: FeatureView, diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index cdf79c78b5..326d2e0226 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -44,9 +44,6 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto -from feast.protos.feast.core.RequestFeatureView_pb2 import ( - RequestFeatureView as RequestFeatureViewProto, -) from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, @@ -55,7 +52,6 @@ ValidationReference as ValidationReferenceProto, ) from feast.repo_config import RegistryConfig -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -370,7 +366,6 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): deleted_count = 0 for table in { "FEATURE_VIEWS", - "REQUEST_FEATURE_VIEWS", "ON_DEMAND_FEATURE_VIEWS", "STREAM_FEATURE_VIEWS", }: @@ -529,25 +524,6 @@ def get_on_demand_feature_view( FeatureViewNotFoundException, ) - def get_request_feature_view( - self, name: str, project: str, allow_cache: bool = False - ) -> RequestFeatureView: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return 
proto_registry_utils.get_request_feature_view( - self.cached_registry_proto, name, project - ) - return self._get_object( - "REQUEST_FEATURE_VIEWS", - name, - project, - RequestFeatureViewProto, - RequestFeatureView, - "REQUEST_FEATURE_VIEW_NAME", - "REQUEST_FEATURE_VIEW_PROTO", - FeatureViewNotFoundException, - ) - def get_saved_dataset( self, name: str, project: str, allow_cache: bool = False ) -> SavedDataset: @@ -709,22 +685,6 @@ def list_on_demand_feature_views( "ON_DEMAND_FEATURE_VIEW_PROTO", ) - def list_request_feature_views( - self, project: str, allow_cache: bool = False - ) -> List[RequestFeatureView]: - if allow_cache: - self._refresh_cached_registry_if_necessary() - return proto_registry_utils.list_request_feature_views( - self.cached_registry_proto, project - ) - return self._list_objects( - "REQUEST_FEATURE_VIEWS", - project, - RequestFeatureViewProto, - RequestFeatureView, - "REQUEST_FEATURE_VIEW_PROTO", - ) - def list_saved_datasets( self, project: str, allow_cache: bool = False ) -> List[SavedDataset]: @@ -809,7 +769,7 @@ def apply_materialization( fv_column_name = fv_table_str[:-1] python_class, proto_class = self._infer_fv_classes(feature_view) - if python_class in {RequestFeatureView, OnDemandFeatureView}: + if python_class in {OnDemandFeatureView}: raise ValueError( f"Cannot apply materialization for feature {feature_view.name} of type {python_class}" ) @@ -933,7 +893,6 @@ def proto(self) -> RegistryProto: (self.list_feature_views, r.feature_views), (self.list_data_sources, r.data_sources), (self.list_on_demand_feature_views, r.on_demand_feature_views), - (self.list_request_feature_views, r.request_feature_views), (self.list_stream_feature_views, r.stream_feature_views), (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), @@ -968,7 +927,6 @@ def _get_all_projects(self) -> Set[str]: "ENTITIES", "FEATURE_VIEWS", "ON_DEMAND_FEATURE_VIEWS", - "REQUEST_FEATURE_VIEWS", "STREAM_FEATURE_VIEWS", ] @@ -1010,8 
+968,6 @@ def _infer_fv_classes(self, feature_view): python_class, proto_class = FeatureView, FeatureViewProto elif isinstance(feature_view, OnDemandFeatureView): python_class, proto_class = OnDemandFeatureView, OnDemandFeatureViewProto - elif isinstance(feature_view, RequestFeatureView): - python_class, proto_class = RequestFeatureView, RequestFeatureViewProto else: raise ValueError(f"Unexpected feature view type: {type(feature_view)}") return python_class, proto_class @@ -1023,8 +979,6 @@ def _infer_fv_table(self, feature_view) -> str: table = "FEATURE_VIEWS" elif isinstance(feature_view, OnDemandFeatureView): table = "ON_DEMAND_FEATURE_VIEWS" - elif isinstance(feature_view, RequestFeatureView): - table = "REQUEST_FEATURE_VIEWS" else: raise ValueError(f"Unexpected feature view type: {type(feature_view)}") return table diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 597c9b8513..2077ba4aae 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -50,9 +50,6 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto -from feast.protos.feast.core.RequestFeatureView_pb2 import ( - RequestFeatureView as RequestFeatureViewProto, -) from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, @@ -61,7 +58,6 @@ ValidationReference as ValidationReferenceProto, ) from feast.repo_config import RegistryConfig -from feast.request_feature_view import RequestFeatureView from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -96,16 +92,6 @@ Column("user_metadata", LargeBinary, nullable=True), ) -request_feature_views = Table( - "request_feature_views", - metadata, - Column("feature_view_name", String(50), 
primary_key=True), - Column("project_id", String(50), primary_key=True), - Column("last_updated_timestamp", BigInteger, nullable=False), - Column("feature_view_proto", LargeBinary, nullable=False), - Column("user_metadata", LargeBinary, nullable=True), -) - stream_feature_views = Table( "stream_feature_views", metadata, @@ -216,7 +202,6 @@ def teardown(self): feature_views, feature_services, on_demand_feature_views, - request_feature_views, saved_datasets, validation_references, }: @@ -292,18 +277,6 @@ def _get_on_demand_feature_view( not_found_exception=FeatureViewNotFoundException, ) - def _get_request_feature_view(self, name: str, project: str): - return self._get_object( - table=request_feature_views, - name=name, - project=project, - proto_class=RequestFeatureViewProto, - python_class=RequestFeatureView, - id_field_name="feature_view_name", - proto_field_name="feature_view_proto", - not_found_exception=FeatureViewNotFoundException, - ) - def _get_feature_service(self, name: str, project: str) -> FeatureService: return self._get_object( table=feature_services, @@ -363,7 +336,6 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): deleted_count = 0 for table in { feature_views, - request_feature_views, on_demand_feature_views, stream_feature_views, }: @@ -459,15 +431,6 @@ def _list_saved_datasets(self, project: str) -> List[SavedDataset]: "saved_dataset_proto", ) - def _list_request_feature_views(self, project: str) -> List[RequestFeatureView]: - return self._list_objects( - request_feature_views, - project, - RequestFeatureViewProto, - RequestFeatureView, - "feature_view_proto", - ) - def _list_on_demand_feature_views(self, project: str) -> List[OnDemandFeatureView]: return self._list_objects( on_demand_feature_views, @@ -532,7 +495,7 @@ def apply_materialization( table = self._infer_fv_table(feature_view) python_class, proto_class = self._infer_fv_classes(feature_view) - if python_class in {RequestFeatureView, OnDemandFeatureView}: + 
if python_class in {OnDemandFeatureView}: raise ValueError( f"Cannot apply materialization for feature {feature_view.name} of type {python_class}" ) @@ -628,8 +591,6 @@ def _infer_fv_table(self, feature_view): table = feature_views elif isinstance(feature_view, OnDemandFeatureView): table = on_demand_feature_views - elif isinstance(feature_view, RequestFeatureView): - table = request_feature_views else: raise ValueError(f"Unexpected feature view type: {type(feature_view)}") return table @@ -641,8 +602,6 @@ def _infer_fv_classes(self, feature_view): python_class, proto_class = FeatureView, FeatureViewProto elif isinstance(feature_view, OnDemandFeatureView): python_class, proto_class = OnDemandFeatureView, OnDemandFeatureViewProto - elif isinstance(feature_view, RequestFeatureView): - python_class, proto_class = RequestFeatureView, RequestFeatureViewProto else: raise ValueError(f"Unexpected feature view type: {type(feature_view)}") return python_class, proto_class @@ -671,7 +630,6 @@ def proto(self) -> RegistryProto: (self.list_feature_views, r.feature_views), (self.list_data_sources, r.data_sources), (self.list_on_demand_feature_views, r.on_demand_feature_views), - (self.list_request_feature_views, r.request_feature_views), (self.list_stream_feature_views, r.stream_feature_views), (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), @@ -905,7 +863,6 @@ def _get_all_projects(self) -> Set[str]: entities, data_sources, feature_views, - request_feature_views, on_demand_feature_views, stream_feature_views, }: diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql index 4b53d6bb3f..aa35caeac4 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql @@ -57,15 +57,6 @@ CREATE TABLE IF NOT EXISTS 
REGISTRY_PATH."ON_DEMAND_FEATURE_VIEWS" ( PRIMARY KEY (on_demand_feature_view_name, project_id) ); -CREATE TABLE IF NOT EXISTS REGISTRY_PATH."REQUEST_FEATURE_VIEWS" ( - request_feature_view_name VARCHAR, - project_id VARCHAR, - last_updated_timestamp TIMESTAMP_LTZ NOT NULL, - request_feature_view_proto BINARY NOT NULL, - user_metadata BINARY, - PRIMARY KEY (request_feature_view_name, project_id) -); - CREATE TABLE IF NOT EXISTS REGISTRY_PATH."SAVED_DATASETS" ( saved_dataset_name VARCHAR, project_id VARCHAR, diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql index 7f5c1991ea..a355c72062 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql @@ -12,8 +12,6 @@ DROP TABLE IF EXISTS REGISTRY_PATH."MANAGED_INFRA"; DROP TABLE IF EXISTS REGISTRY_PATH."ON_DEMAND_FEATURE_VIEWS"; -DROP TABLE IF EXISTS REGISTRY_PATH."REQUEST_FEATURE_VIEWS"; - DROP TABLE IF EXISTS REGISTRY_PATH."SAVED_DATASETS"; DROP TABLE IF EXISTS REGISTRY_PATH."STREAM_FEATURE_VIEWS"; diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 221715480e..7de0cc43e1 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -59,23 +59,6 @@ def ListFeatureViews(self, request, context): ] ) - def GetRequestFeatureView( - self, request: RegistryServer_pb2.GetRequestFeatureViewRequest, context - ): - return self.proxied_registry.get_request_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache - ).to_proto() - - def ListRequestFeatureViews(self, request, context): - return RegistryServer_pb2.ListRequestFeatureViewsResponse( - request_feature_views=[ - request_feature_view.to_proto() - for request_feature_view in self.proxied_registry.list_request_feature_views( - 
project=request.project, allow_cache=request.allow_cache - ) - ] - ) - def GetStreamFeatureView( self, request: RegistryServer_pb2.GetStreamFeatureViewRequest, context ): diff --git a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py index fe5cbd284b..33b99f29b2 100644 --- a/sdk/python/feast/repo_contents.py +++ b/sdk/python/feast/repo_contents.py @@ -19,7 +19,6 @@ from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto -from feast.request_feature_view import RequestFeatureView from feast.stream_feature_view import StreamFeatureView @@ -31,7 +30,6 @@ class RepoContents(NamedTuple): data_sources: List[DataSource] feature_views: List[FeatureView] on_demand_feature_views: List[OnDemandFeatureView] - request_feature_views: List[RequestFeatureView] stream_feature_views: List[StreamFeatureView] entities: List[Entity] feature_services: List[FeatureService] @@ -46,9 +44,6 @@ def to_registry_proto(self) -> RegistryProto: registry_proto.on_demand_feature_views.extend( [fv.to_proto() for fv in self.on_demand_feature_views] ) - registry_proto.request_feature_views.extend( - [fv.to_proto() for fv in self.request_feature_views] - ) registry_proto.feature_services.extend( [fs.to_proto() for fs in self.feature_services] ) diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 120f6e7a42..000e000438 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -29,7 +29,6 @@ from feast.on_demand_feature_view import OnDemandFeatureView from feast.repo_config import RepoConfig from feast.repo_contents import RepoContents -from feast.request_feature_view import RequestFeatureView from feast.stream_feature_view import StreamFeatureView from feast.usage import log_exceptions_and_usage @@ -114,7 +113,6 @@ def parse_repo(repo_root: Path) -> RepoContents: feature_services=[], 
on_demand_feature_views=[], stream_feature_views=[], - request_feature_views=[], ) for repo_file in get_repo_files(repo_root): @@ -196,10 +194,6 @@ def parse_repo(repo_root: Path) -> RepoContents: (obj is odfv) for odfv in res.on_demand_feature_views ): res.on_demand_feature_views.append(obj) - elif isinstance(obj, RequestFeatureView) and not any( - (obj is rfv) for rfv in res.request_feature_views - ): - res.request_feature_views.append(obj) res.entities.append(DUMMY_ENTITY) return res @@ -250,7 +244,6 @@ def extract_objects_for_apply_delete(project, registry, repo): Union[ Entity, FeatureView, - RequestFeatureView, OnDemandFeatureView, StreamFeatureView, FeatureService, @@ -264,7 +257,6 @@ def extract_objects_for_apply_delete(project, registry, repo): Union[ Entity, FeatureView, - RequestFeatureView, OnDemandFeatureView, StreamFeatureView, FeatureService, diff --git a/sdk/python/feast/request_feature_view.py b/sdk/python/feast/request_feature_view.py deleted file mode 100644 index 7248ffe989..0000000000 --- a/sdk/python/feast/request_feature_view.py +++ /dev/null @@ -1,137 +0,0 @@ -import copy -import warnings -from typing import Dict, List, Optional, Type - -from feast.base_feature_view import BaseFeatureView -from feast.data_source import RequestSource -from feast.feature_view_projection import FeatureViewProjection -from feast.field import Field -from feast.protos.feast.core.RequestFeatureView_pb2 import ( - RequestFeatureView as RequestFeatureViewProto, -) -from feast.protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec -from feast.usage import log_exceptions - - -class RequestFeatureView(BaseFeatureView): - """ - [Experimental] A RequestFeatureView defines a logical group of features that should - be available as an input to an on demand feature view at request time. - - Attributes: - name: The unique name of the request feature view. - request_source: The request source that specifies the schema and - features of the request feature view. 
- features: The list of features defined as part of this request feature view. - description: A human-readable description. - tags: A dictionary of key-value pairs to store arbitrary metadata. - owner: The owner of the request feature view, typically the email of the primary - maintainer. - """ - - name: str - request_source: RequestSource - features: List[Field] - description: str - tags: Dict[str, str] - owner: str - - @log_exceptions - def __init__( - self, - name: str, - request_data_source: RequestSource, - description: str = "", - tags: Optional[Dict[str, str]] = None, - owner: str = "", - ): - """ - Creates a RequestFeatureView object. - - Args: - name: The unique name of the request feature view. - request_data_source: The request data source that specifies the schema and - features of the request feature view. - description (optional): A human-readable description. - tags (optional): A dictionary of key-value pairs to store arbitrary metadata. - owner (optional): The owner of the request feature view, typically the email - of the primary maintainer. - """ - warnings.warn( - "Request feature view is deprecated. " - "Please use request data source instead", - DeprecationWarning, - ) - - if isinstance(request_data_source.schema, Dict): - new_features = [ - Field(name=name, dtype=dtype) - for name, dtype in request_data_source.schema.items() - ] - else: - new_features = request_data_source.schema - - super().__init__( - name=name, - features=new_features, - description=description, - tags=tags, - owner=owner, - ) - self.request_source = request_data_source - - @property - def proto_class(self) -> Type[RequestFeatureViewProto]: - return RequestFeatureViewProto - - def to_proto(self) -> RequestFeatureViewProto: - """ - Converts an request feature view object to its protobuf representation. - - Returns: - A RequestFeatureViewProto protobuf. 
- """ - spec = RequestFeatureViewSpec( - name=self.name, - request_data_source=self.request_source.to_proto(), - description=self.description, - tags=self.tags, - owner=self.owner, - ) - - return RequestFeatureViewProto(spec=spec) - - @classmethod - def from_proto(cls, request_feature_view_proto: RequestFeatureViewProto): - """ - Creates a request feature view from a protobuf representation. - - Args: - request_feature_view_proto: A protobuf representation of an request feature view. - - Returns: - A RequestFeatureView object based on the request feature view protobuf. - """ - - request_feature_view_obj = cls( - name=request_feature_view_proto.spec.name, - request_data_source=RequestSource.from_proto( - request_feature_view_proto.spec.request_data_source - ), - description=request_feature_view_proto.spec.description, - tags=dict(request_feature_view_proto.spec.tags), - owner=request_feature_view_proto.spec.owner, - ) - - # FeatureViewProjections are not saved in the RequestFeatureView proto. - # Create the default projection. 
- request_feature_view_obj.projection = FeatureViewProjection.from_definition( - request_feature_view_obj - ) - - return request_feature_view_obj - - def __copy__(self): - fv = RequestFeatureView(name=self.name, request_data_source=self.request_source) - fv.projection = copy.copy(self.projection) - return fv From c364be473da5162e09f1e10a92950107e2e5c5e7 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Wed, 27 Mar 2024 20:57:58 +0400 Subject: [PATCH 088/122] chore: Run tests as make commands in workflows (#4035) * run tests as make commands in workflows Signed-off-by: tokoko * rename test-python make command to test-python-unit Signed-off-by: tokoko * add env vars to pytest.ini Signed-off-by: tokoko * fix odfv transformation field signature Signed-off-by: tokoko * fix linting, formatting Signed-off-by: tokoko * handle stream_feature_view failures Signed-off-by: tokoko --------- Signed-off-by: tokoko --- .github/workflows/pr_integration_tests.yml | 2 +- .../workflows/pr_local_integration_tests.yml | 7 +--- .github/workflows/unit_tests.yml | 13 +------ Makefile | 37 ++++--------------- sdk/python/feast/stream_feature_view.py | 14 ++++--- sdk/python/pytest.ini | 6 ++- .../requirements/py3.10-ci-requirements.txt | 31 +++++++++------- .../requirements/py3.10-requirements.txt | 6 +-- .../requirements/py3.9-ci-requirements.txt | 31 +++++++++------- .../requirements/py3.9-requirements.txt | 6 +-- sdk/python/tests/conftest.py | 3 -- setup.py | 1 + 12 files changed, 66 insertions(+), 91 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index b335c0f042..5e7287351b 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -167,4 +167,4 @@ jobs: SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: pytest -n 8 --cov=./ 
--cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread + run: make test-python-integration \ No newline at end of file diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 17ff54b1f8..266cdcc9b9 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -61,9 +61,4 @@ jobs: run: make install-python-ci-dependencies - name: Test local integration tests if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak - env: - FEAST_USAGE: "False" - IS_TEST: "True" - FEAST_LOCAL_ONLINE_CONTAINER: "True" - FEAST_IS_LOCAL_TEST: "True" - run: pytest -n 8 --cov=./ --cov-report=xml --color=yes --integration -k "not gcs_registry and not s3_registry and not test_lambda_materialization and not test_snowflake_materialization" sdk/python/tests + run: make test-python-integration-local diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 7e2e3b577a..f3f91bb67f 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -23,17 +23,6 @@ jobs: with: python-version: ${{ matrix.python-version }} architecture: x64 - - name: Install mysql on macOS - if: startsWith(matrix.os, 'macOS') - run: | - brew install mysql - PATH=$PATH:/usr/local/mysql/bin - - name: Work around Homebrew MySQL being broken - # See https://github.com/Homebrew/homebrew-core/issues/130258 for more details. 
- if: startsWith(matrix.os, 'macOS') - run: | - brew install zlib - ln -sv $(brew --prefix zlib)/lib/libz.dylib $(brew --prefix)/lib/libzlib.dylib - name: Get pip cache dir id: pip-cache run: | @@ -56,7 +45,7 @@ jobs: - name: Install dependencies run: make install-python-ci-dependencies - name: Test Python - run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests + run: make test-python-unit unit-test-ui: diff --git a/Makefile b/Makefile index 0eac7e03a2..55896b8c11 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ format: format-python format-java lint: lint-python lint-java -test: test-python test-java +test: test-python-unit test-java protos: compile-protos-python compile-protos-docs @@ -63,32 +63,26 @@ benchmark-python: benchmark-python-local: FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests -test-python: - FEAST_USAGE=False \ - IS_TEST=True \ - python -m pytest -n 8 sdk/python/tests \ +test-python-unit: + python -m pytest -n 8 --color=yes sdk/python/tests test-python-integration: - FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests + python -m pytest -n 8 --integration --color=yes --durations=5 --timeout=1200 --timeout_method=thread sdk/python/tests test-python-integration-local: @(docker info > /dev/null 2>&1 && \ - FEAST_USAGE=False \ - IS_TEST=True \ FEAST_IS_LOCAL_TEST=True \ FEAST_LOCAL_ONLINE_CONTAINER=True \ - python -m pytest -n 8 --integration \ + python -m pytest -n 8 --color=yes --integration \ -k "not gcs_registry and \ not s3_registry and \ not test_lambda_materialization and \ - not test_snowflake" \ + not test_snowflake_materialization" \ sdk/python/tests \ ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; test-python-integration-container: @(docker info > /dev/null 2>&1 && \ - FEAST_USAGE=False \ - IS_TEST=True \ 
FEAST_LOCAL_ONLINE_CONTAINER=True \ python -m pytest -n 8 --integration sdk/python/tests \ ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; @@ -97,7 +91,6 @@ test-python-universal-spark: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.spark_repo_configuration \ PYTEST_PLUGINS=feast.infra.offline_stores.contrib.spark_offline_store.tests \ - FEAST_USAGE=False IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_historical_retrieval_fails_on_validation and \ not test_historical_retrieval_with_validation and \ @@ -121,7 +114,6 @@ test-python-universal-trino: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.trino_repo_configuration \ PYTEST_PLUGINS=feast.infra.offline_stores.contrib.trino_offline_store.tests \ - FEAST_USAGE=False IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_historical_retrieval_fails_on_validation and \ not test_historical_retrieval_with_validation and \ @@ -148,7 +140,6 @@ test-python-universal-mssql: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.mssql_repo_configuration \ PYTEST_PLUGINS=feast.infra.offline_stores.contrib.mssql_offline_store.tests \ - FEAST_USAGE=False IS_TEST=True \ FEAST_LOCAL_ONLINE_CONTAINER=True \ python -m pytest -n 8 --integration \ -k "not gcs_registry and \ @@ -166,7 +157,6 @@ test-python-universal-athena: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.athena_repo_configuration \ PYTEST_PLUGINS=feast.infra.offline_stores.contrib.athena_offline_store.tests \ - FEAST_USAGE=False IS_TEST=True \ ATHENA_REGION=ap-northeast-2 \ ATHENA_DATA_SOURCE=AwsDataCatalog \ ATHENA_DATABASE=default \ @@ -190,7 +180,6 @@ test-python-universal-athena: test-python-universal-duckdb: PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.duckdb_repo_configuration \ - FEAST_USAGE=False IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_nullable_online_store and \ not gcs_registry and \ @@ -204,8 +193,6 @@ test-python-universal-postgres-offline: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.postgres_repo_configuration \ PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.postgres_offline_store.tests \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_historical_retrieval_with_validation and \ not test_historical_features_persisting and \ @@ -226,8 +213,6 @@ test-python-universal-postgres-online: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.postgres_repo_configuration \ PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.postgres_offline_store.tests \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -247,8 +232,6 @@ test-python-universal-postgres-online: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.mysql_repo_configuration \ PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.mysql \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -268,8 +251,6 @@ test-python-universal-cassandra: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.cassandra_repo_configuration \ PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -x --integration \ sdk/python/tests @@ -277,8 +258,6 @@ test-python-universal-hazelcast: PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.hazelcast_repo_configuration \ PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.hazelcast \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -298,8 +277,6 @@ test-python-universal-cassandra-no-cloud-providers: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.cassandra_repo_configuration \ PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ - FEAST_USAGE=False \ - IS_TEST=True \ python -m pytest -x --integration \ -k "not test_lambda_materialization_consistency and \ not test_apply_entity_integration and \ @@ -314,7 +291,7 @@ test-python-universal-cassandra-no-cloud-providers: sdk/python/tests test-python-universal: - FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests + python -m pytest -n 8 --integration sdk/python/tests format-python: # Sort diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index 0d1125d2bd..301cf6cba5 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -241,11 +241,11 @@ def from_proto(cls, sfv_proto): if sfv_proto.spec.HasField("user_defined_function") else None ) - feature_transformation = ( - sfv_proto.spec.feature_transformation.user_defined_function.body_text - if sfv_proto.spec.HasField("feature_transformation") - else None - ) + # feature_transformation = ( + # sfv_proto.spec.feature_transformation.user_defined_function.body_text + # if sfv_proto.spec.HasField("feature_transformation") + # else None + # ) stream_feature_view = cls( name=sfv_proto.spec.name, description=sfv_proto.spec.description, @@ -264,7 +264,9 @@ def from_proto(cls, sfv_proto): mode=sfv_proto.spec.mode, udf=udf, udf_string=udf_string, - 
feature_transformation=feature_transformation, + feature_transformation=PandasTransformation(udf, udf_string) + if udf + else None, aggregations=[ Aggregation.from_proto(agg_proto) for agg_proto in sfv_proto.spec.aggregations diff --git a/sdk/python/pytest.ini b/sdk/python/pytest.ini index 07a5e869dc..83317d36c9 100644 --- a/sdk/python/pytest.ini +++ b/sdk/python/pytest.ini @@ -1,4 +1,8 @@ [pytest] markers = universal_offline_stores: mark a test as using all offline stores. - universal_online_stores: mark a test as using all online stores. \ No newline at end of file + universal_online_stores: mark a test as using all online stores. + +env = + FEAST_USAGE=False + IS_TEST=True \ No newline at end of file diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 8f0ef90d77..4b1e26e1f3 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -61,11 +61,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.65 +boto3==1.34.69 # via # feast (setup.py) # moto -botocore==1.34.65 +botocore==1.34.69 # via # boto3 # moto @@ -82,7 +82,7 @@ cachecontrol==0.14.0 # via firebase-admin cachetools==5.3.3 # via google-auth -cassandra-driver==3.29.0 +cassandra-driver==3.29.1 # via feast (setup.py) certifi==2024.2.2 # via @@ -199,7 +199,7 @@ geojson==2.5.0 # via rockset geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.17.1 +google-api-core[grpc]==2.18.0 # via # feast (setup.py) # firebase-admin @@ -211,9 +211,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.122.0 +google-api-python-client==2.123.0 # via firebase-admin -google-auth==2.28.2 +google-auth==2.29.0 # via # google-api-core # google-api-python-client @@ -258,7 +258,7 @@ googleapis-common-protos[grpc]==1.63.0 # google-api-core # grpc-google-iam-v1 # 
grpcio-status -great-expectations==0.18.11 +great-expectations==0.18.12 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -331,7 +331,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.3.1 +importlib-resources==6.4.0 # via feast (setup.py) iniconfig==2.0.0 # via pytest @@ -459,7 +459,7 @@ moreorless==0.4.0 # via bowler moto==4.2.14 # via feast (setup.py) -msal==1.27.0 +msal==1.28.0 # via # azure-identity # msal-extensions @@ -483,7 +483,7 @@ mypy-protobuf==3.3.0 # via feast (setup.py) nbclient==0.10.0 # via nbconvert -nbconvert==7.16.2 +nbconvert==7.16.3 # via jupyter-server nbformat==5.10.3 # via @@ -581,6 +581,7 @@ prompt-toolkit==3.0.43 # via ipython proto-plus==1.23.0 # via + # google-api-core # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -687,6 +688,7 @@ pytest==7.4.4 # feast (setup.py) # pytest-benchmark # pytest-cov + # pytest-env # pytest-lazy-fixture # pytest-mock # pytest-ordering @@ -696,6 +698,8 @@ pytest-benchmark==3.4.1 # via feast (setup.py) pytest-cov==4.1.0 # via feast (setup.py) +pytest-env==1.1.3 + # via feast (setup.py) pytest-lazy-fixture==0.6.3 # via feast (setup.py) pytest-mock==1.10.4 @@ -773,7 +777,7 @@ requests==2.31.0 # snowflake-connector-python # sphinx # trino -requests-oauthlib==1.4.0 +requests-oauthlib==2.0.0 # via kubernetes responses==0.25.0 # via moto @@ -882,6 +886,7 @@ tomli==2.0.1 # pip-tools # pyproject-hooks # pytest + # pytest-env tomlkit==0.12.4 # via snowflake-connector-python toolz==0.12.1 @@ -919,7 +924,7 @@ traitlets==5.14.2 # nbformat trino==0.328.0 # via feast (setup.py) -typeguard==4.1.5 +typeguard==4.2.1 # via feast (setup.py) types-protobuf==3.19.22 # via @@ -984,7 +989,7 @@ urllib3==1.26.18 # requests # responses # rockset -uvicorn[standard]==0.28.0 +uvicorn[standard]==0.29.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/requirements/py3.10-requirements.txt 
b/sdk/python/requirements/py3.10-requirements.txt index e17a588538..6603171d45 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -62,7 +62,7 @@ importlib-metadata==6.11.0 # via # dask # feast (setup.py) -importlib-resources==6.3.1 +importlib-resources==6.4.0 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -158,7 +158,7 @@ toolz==0.12.1 # partd tqdm==4.66.2 # via feast (setup.py) -typeguard==4.1.5 +typeguard==4.2.1 # via feast (setup.py) types-protobuf==4.24.0.20240311 # via mypy-protobuf @@ -176,7 +176,7 @@ tzdata==2024.1 # via pandas urllib3==2.2.1 # via requests -uvicorn[standard]==0.28.0 +uvicorn[standard]==0.29.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index dc96554431..99dee08c05 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -61,11 +61,11 @@ black==22.12.0 # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.34.65 +boto3==1.34.69 # via # feast (setup.py) # moto -botocore==1.34.65 +botocore==1.34.69 # via # boto3 # moto @@ -82,7 +82,7 @@ cachecontrol==0.14.0 # via firebase-admin cachetools==5.3.3 # via google-auth -cassandra-driver==3.29.0 +cassandra-driver==3.29.1 # via feast (setup.py) certifi==2024.2.2 # via @@ -199,7 +199,7 @@ geojson==2.5.0 # via rockset geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.17.1 +google-api-core[grpc]==2.18.0 # via # feast (setup.py) # firebase-admin @@ -211,9 +211,9 @@ google-api-core[grpc]==2.17.1 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.122.0 +google-api-python-client==2.123.0 # via firebase-admin -google-auth==2.28.2 +google-auth==2.29.0 # via # google-api-core # google-api-python-client @@ -258,7 +258,7 @@ googleapis-common-protos[grpc]==1.63.0 # 
google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.11 +great-expectations==0.18.12 # via feast (setup.py) greenlet==3.0.3 # via sqlalchemy @@ -339,7 +339,7 @@ importlib-metadata==6.11.0 # nbconvert # sphinx # typeguard -importlib-resources==6.3.1 +importlib-resources==6.4.0 # via feast (setup.py) iniconfig==2.0.0 # via pytest @@ -467,7 +467,7 @@ moreorless==0.4.0 # via bowler moto==4.2.14 # via feast (setup.py) -msal==1.27.0 +msal==1.28.0 # via # azure-identity # msal-extensions @@ -491,7 +491,7 @@ mypy-protobuf==3.3.0 # via feast (setup.py) nbclient==0.10.0 # via nbconvert -nbconvert==7.16.2 +nbconvert==7.16.3 # via jupyter-server nbformat==5.10.3 # via @@ -589,6 +589,7 @@ prompt-toolkit==3.0.43 # via ipython proto-plus==1.23.0 # via + # google-api-core # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -695,6 +696,7 @@ pytest==7.4.4 # feast (setup.py) # pytest-benchmark # pytest-cov + # pytest-env # pytest-lazy-fixture # pytest-mock # pytest-ordering @@ -704,6 +706,8 @@ pytest-benchmark==3.4.1 # via feast (setup.py) pytest-cov==4.1.0 # via feast (setup.py) +pytest-env==1.1.3 + # via feast (setup.py) pytest-lazy-fixture==0.6.3 # via feast (setup.py) pytest-mock==1.10.4 @@ -781,7 +785,7 @@ requests==2.31.0 # snowflake-connector-python # sphinx # trino -requests-oauthlib==1.4.0 +requests-oauthlib==2.0.0 # via kubernetes responses==0.25.0 # via moto @@ -892,6 +896,7 @@ tomli==2.0.1 # pip-tools # pyproject-hooks # pytest + # pytest-env tomlkit==0.12.4 # via snowflake-connector-python toolz==0.12.1 @@ -929,7 +934,7 @@ traitlets==5.14.2 # nbformat trino==0.328.0 # via feast (setup.py) -typeguard==4.1.5 +typeguard==4.2.1 # via feast (setup.py) types-protobuf==3.19.22 # via @@ -998,7 +1003,7 @@ urllib3==1.26.18 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.28.0 +uvicorn[standard]==0.29.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git 
a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index f2228ade02..3b8f555ca7 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -63,7 +63,7 @@ importlib-metadata==6.11.0 # dask # feast (setup.py) # typeguard -importlib-resources==6.3.1 +importlib-resources==6.4.0 # via feast (setup.py) jinja2==3.1.3 # via feast (setup.py) @@ -159,7 +159,7 @@ toolz==0.12.1 # partd tqdm==4.66.2 # via feast (setup.py) -typeguard==4.1.5 +typeguard==4.2.1 # via feast (setup.py) types-protobuf==4.24.0.20240311 # via mypy-protobuf @@ -178,7 +178,7 @@ tzdata==2024.1 # via pandas urllib3==2.2.1 # via requests -uvicorn[standard]==0.28.0 +uvicorn[standard]==0.29.0 # via feast (setup.py) uvloop==0.19.0 # via uvicorn diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 743a1ce4a0..1c9a958ce3 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -13,7 +13,6 @@ # limitations under the License. 
import logging import multiprocessing -import os import random from datetime import datetime, timedelta from multiprocessing import Process @@ -24,8 +23,6 @@ import pytest from _pytest.nodes import Item -os.environ["FEAST_USAGE"] = "False" -os.environ["IS_TEST"] = "True" from feast.feature_store import FeatureStore # noqa: E402 from feast.wait import wait_retry_backoff # noqa: E402 from tests.data.data_creator import create_basic_driver_dataset # noqa: E402 diff --git a/setup.py b/setup.py index b32d03ed77..f142da7ec3 100644 --- a/setup.py +++ b/setup.py @@ -177,6 +177,7 @@ "pytest-timeout==1.4.2", "pytest-ordering~=0.6.0", "pytest-mock==1.10.4", + "pytest-env", "Sphinx>4.0.0,<7", "testcontainers>=3.5,<4", "firebase-admin>=5.2.0,<6", From ad8f5721af6d8ad8b7539b91e0616ebf6e47f47b Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Wed, 27 Mar 2024 21:53:55 +0400 Subject: [PATCH 089/122] fix: Remove proto-plus imports (#4044) remove proto-plus imports Signed-off-by: tokoko --- sdk/python/feast/base_feature_view.py | 15 ++++++++++++--- sdk/python/feast/infra/registry/base_registry.py | 2 +- sdk/python/feast/infra/registry/registry.py | 2 +- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py index 975537a394..31140e2899 100644 --- a/sdk/python/feast/base_feature_view.py +++ b/sdk/python/feast/base_feature_view.py @@ -13,13 +13,20 @@ # limitations under the License. 
from abc import ABC, abstractmethod from datetime import datetime -from typing import Dict, List, Optional, Type +from typing import Dict, List, Optional, Type, Union from google.protobuf.json_format import MessageToJson -from proto import Message +from google.protobuf.message import Message from feast.feature_view_projection import FeatureViewProjection from feast.field import Field +from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + OnDemandFeatureView as OnDemandFeatureViewProto, +) +from feast.protos.feast.core.StreamFeatureView_pb2 import ( + StreamFeatureView as StreamFeatureViewProto, +) class BaseFeatureView(ABC): @@ -89,7 +96,9 @@ def proto_class(self) -> Type[Message]: pass @abstractmethod - def to_proto(self) -> Message: + def to_proto( + self, + ) -> Union[FeatureViewProto, OnDemandFeatureViewProto, StreamFeatureViewProto]: pass @classmethod diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index c874001c89..c67164103e 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -19,7 +19,7 @@ from typing import Any, Dict, List, Optional from google.protobuf.json_format import MessageToJson -from proto import Message +from google.protobuf.message import Message from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index 3c9843c904..d949b6079d 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -20,7 +20,7 @@ from urllib.parse import urlparse from google.protobuf.internal.containers import RepeatedCompositeFieldContainer -from proto import Message +from google.protobuf.message import Message from feast import usage from 
feast.base_feature_view import BaseFeatureView From ef62defbd80172ba3c536c413388234707278be1 Mon Sep 17 00:00:00 2001 From: Nmroth42 <38186683+Nmroth42@users.noreply.github.com> Date: Thu, 28 Mar 2024 00:54:26 +0700 Subject: [PATCH 090/122] fix: SqlRegistry _apply_object update statement (#4042) * fix sql registry update stmt Signed-off-by: Nmroth42 * refactor code Signed-off-by: Nmroth42 --------- Signed-off-by: Nmroth42 --- sdk/python/feast/infra/registry/sql.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 2077ba4aae..f9030a6875 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -691,7 +691,10 @@ def _apply_object( } update_stmt = ( update(table) - .where(getattr(table.c, id_field_name) == name) + .where( + getattr(table.c, id_field_name) == name, + table.c.project_id == project, + ) .values( values, ) From 73bc85351a9202d3db93907e8206d68123ee5baa Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Thu, 28 Mar 2024 17:13:58 -0400 Subject: [PATCH 091/122] feat: Adding support for Native Python feature transformations for ODFVs (#4045) --- .../embedded_go/online_features_service.py | 5 + sdk/python/feast/feature_store.py | 55 +++-- .../infra/offline_stores/offline_store.py | 8 + sdk/python/feast/on_demand_feature_view.py | 211 ++++++++++++++++-- .../transformation/pandas_transformation.py | 15 +- .../transformation/python_transformation.py | 65 ++++++ sdk/python/feast/transformation_server.py | 5 + .../unit/infra/test_inference_unit_tests.py | 69 +++++- .../tests/unit/test_on_demand_feature_view.py | 92 +++++++- ...test_on_demand_substrait_transformation.py | 1 + 10 files changed, 488 insertions(+), 38 deletions(-) create mode 100644 sdk/python/feast/transformation/python_transformation.py diff --git a/sdk/python/feast/embedded_go/online_features_service.py 
b/sdk/python/feast/embedded_go/online_features_service.py index bf82fab6a3..c6430b5f6d 100644 --- a/sdk/python/feast/embedded_go/online_features_service.py +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -252,6 +252,11 @@ def transformation_callback( # the typeguard requirement. full_feature_names = bool(full_feature_names) + if odfv.mode != "pandas": + raise Exception( + f"OnDemandFeatureView mode '{odfv.mode} not supported by EmbeddedOnlineFeatureServer." + ) + output = odfv.get_transformed_features_df( input_record.to_pandas(), full_feature_names=full_feature_names ) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 9ac2c14527..bfb8a59b2b 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1995,26 +1995,53 @@ def _augment_response_with_on_demand_transforms( ) initial_response = OnlineResponse(online_features_response) - initial_response_df = initial_response.to_df() + initial_response_df: Optional[pd.DataFrame] = None + initial_response_dict: Optional[Dict[str, List[Any]]] = None # Apply on demand transformations and augment the result rows odfv_result_names = set() for odfv_name, _feature_refs in odfv_feature_refs.items(): odfv = requested_odfv_map[odfv_name] - transformed_features_df = odfv.get_transformed_features_df( - initial_response_df, - full_feature_names, - ) - selected_subset = [ - f for f in transformed_features_df.columns if f in _feature_refs - ] - - proto_values = [ - python_values_to_proto_values( - transformed_features_df[feature].values, ValueType.UNKNOWN + if odfv.mode == "python": + if initial_response_dict is None: + initial_response_dict = initial_response.to_dict() + transformed_features_dict: Dict[ + str, List[Any] + ] = odfv.get_transformed_features( + initial_response_dict, + full_feature_names, ) - for feature in selected_subset - ] + elif odfv.mode in {"pandas", "substrait"}: + if initial_response_df is None: + initial_response_df = 
initial_response.to_df() + transformed_features_df: pd.DataFrame = odfv.get_transformed_features( + initial_response_df, + full_feature_names, + ) + else: + raise Exception( + f"Invalid OnDemandFeatureMode: {odfv.mode}. Expected one of 'pandas', 'python', or 'substrait'." + ) + + transformed_features = ( + transformed_features_dict + if odfv.mode == "python" + else transformed_features_df + ) + transformed_columns = ( + transformed_features.columns + if isinstance(transformed_features, pd.DataFrame) + else transformed_features + ) + selected_subset = [f for f in transformed_columns if f in _feature_refs] + + proto_values = [] + for selected_feature in selected_subset: + if odfv.mode in ["python", "pandas"]: + feature_vector = transformed_features[selected_feature] + proto_values.append( + python_values_to_proto_values(feature_vector, ValueType.UNKNOWN) + ) odfv_result_names |= set(selected_subset) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 30135feccb..aaed78dd45 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -81,6 +81,10 @@ def to_df( if self.on_demand_feature_views: # TODO(adchia): Fix requirement to specify dependent feature views in feature_refs for odfv in self.on_demand_feature_views: + if odfv.mode not in {"pandas", "substrait"}: + raise Exception( + f'OnDemandFeatureView mode "{odfv.mode}" not supported for offline processing.' + ) features_df = features_df.join( odfv.get_transformed_features_df( features_df, @@ -124,6 +128,10 @@ def to_arrow( features_df = self._to_df_internal(timeout=timeout) if self.on_demand_feature_views: for odfv in self.on_demand_feature_views: + if odfv.mode != "pandas": + raise Exception( + f'OnDemandFeatureView mode "{odfv.mode}" not supported for offline processing.' 
+ ) features_df = features_df.join( odfv.get_transformed_features_df( features_df, diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index ce416fff2a..8d51edbe58 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -32,6 +32,7 @@ UserDefinedFunctionV2 as UserDefinedFunctionProto, ) from feast.transformation.pandas_transformation import PandasTransformation +from feast.transformation.python_transformation import PythonTransformation from feast.transformation.substrait_transformation import SubstraitTransformation from feast.type_map import ( feast_value_type_to_pandas_type, @@ -68,7 +69,10 @@ class OnDemandFeatureView(BaseFeatureView): features: List[Field] source_feature_view_projections: Dict[str, FeatureViewProjection] source_request_sources: Dict[str, RequestSource] - feature_transformation: Union[PandasTransformation, SubstraitTransformation] + feature_transformation: Union[ + PandasTransformation, PythonTransformation, SubstraitTransformation + ] + mode: str description: str tags: Dict[str, str] owner: str @@ -88,9 +92,10 @@ def __init__( # noqa: C901 ], udf: Optional[FunctionType] = None, udf_string: str = "", - feature_transformation: Optional[ - Union[PandasTransformation, SubstraitTransformation] - ] = None, + feature_transformation: Union[ + PandasTransformation, PythonTransformation, SubstraitTransformation + ], + mode: str = "pandas", description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", @@ -109,6 +114,7 @@ def __init__( # noqa: C901 dataframes as inputs. udf_string (deprecated): The source code version of the udf (for diffing and displaying in Web UI) feature_transformation: The user defined transformation. + mode: Mode of execution (e.g., Pandas or Python native) description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. 
owner (optional): The owner of the on demand feature view, typically the email @@ -122,16 +128,28 @@ def __init__( # noqa: C901 owner=owner, ) + if mode not in {"python", "pandas", "substrait"}: + raise Exception( + f"Unknown mode {mode}. OnDemandFeatureView only supports python or pandas UDFs and substrait." + ) + else: + self.mode = mode if not feature_transformation: if udf: warnings.warn( - "udf and udf_string parameters are deprecated. Please use transformation=OnDemandPandasTransformation(udf, udf_string) instead.", + "udf and udf_string parameters are deprecated. Please use transformation=PandasTransformation(udf, udf_string) instead.", DeprecationWarning, ) - feature_transformation = PandasTransformation(udf, udf_string) + # Note inspecting the return signature won't work with isinstance so this is the best alternative + if mode == "pandas": + feature_transformation = PandasTransformation(udf, udf_string) + elif mode == "python": + feature_transformation = PythonTransformation(udf, udf_string) + else: + pass else: raise Exception( - "OnDemandFeatureView needs to be initialized with either transformation or udf arguments" + "OnDemandFeatureView needs to be initialized with either feature_transformation or udf arguments" ) self.source_feature_view_projections: Dict[str, FeatureViewProjection] = {} @@ -159,6 +177,7 @@ def __copy__(self): sources=list(self.source_feature_view_projections.values()) + list(self.source_request_sources.values()), feature_transformation=self.feature_transformation, + mode=self.mode, description=self.description, tags=self.tags, owner=self.owner, @@ -179,6 +198,7 @@ def __eq__(self, other): self.source_feature_view_projections != other.source_feature_view_projections or self.source_request_sources != other.source_request_sources + or self.mode != other.mode or self.feature_transformation != other.feature_transformation ): return False @@ -215,7 +235,10 @@ def to_proto(self) -> OnDemandFeatureViewProto: feature_transformation = 
FeatureTransformationProto( user_defined_function=self.feature_transformation.to_proto() - if isinstance(self.feature_transformation, PandasTransformation) + if isinstance( + self.feature_transformation, + (PandasTransformation, PythonTransformation), + ) else None, substrait_transformation=self.feature_transformation.to_proto() if isinstance(self.feature_transformation, SubstraitTransformation) @@ -226,6 +249,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: features=[feature.to_proto() for feature in self.features], sources=sources, feature_transformation=feature_transformation, + mode=self.mode, description=self.description, tags=self.tags, owner=self.owner, @@ -234,12 +258,17 @@ def to_proto(self) -> OnDemandFeatureViewProto: return OnDemandFeatureViewProto(spec=spec, meta=meta) @classmethod - def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): + def from_proto( + cls, + on_demand_feature_view_proto: OnDemandFeatureViewProto, + skip_udf: bool = False, + ): """ Creates an on demand feature view from a protobuf representation. Args: on_demand_feature_view_proto: A protobuf representation of an on-demand feature view. + skip_udf: A boolean indicating whether to skip loading the udf Returns: A OnDemandFeatureView object based on the on-demand feature view protobuf. 
@@ -311,6 +340,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): ], sources=sources, feature_transformation=transformation, + mode=on_demand_feature_view_proto.spec.mode, description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, @@ -349,12 +379,17 @@ def get_request_data_schema(self) -> Dict[str, ValueType]: ) return schema + def _get_projected_feature_name(self, feature: str) -> str: + return f"{self.projection.name_to_use()}__{feature}" + def get_transformed_features_df( self, df_with_features: pd.DataFrame, full_feature_names: bool = False, ) -> pd.DataFrame: # Apply on demand transformations + if not isinstance(df_with_features, pd.DataFrame): + raise TypeError("get_transformed_features_df only accepts pd.DataFrame") columns_to_cleanup = [] for source_fv_projection in self.source_feature_view_projections.values(): for feature in source_fv_projection.features: @@ -369,16 +404,15 @@ def get_transformed_features_df( columns_to_cleanup.append(full_feature_ref) # Compute transformed values and apply to each result row - - df_with_transformed_features = self.feature_transformation.transform( - df_with_features + df_with_transformed_features: pd.DataFrame = ( + self.feature_transformation.transform(df_with_features) ) # Work out whether the correct columns names are used. 
rename_columns: Dict[str, str] = {} for feature in self.features: short_name = feature.name - long_name = f"{self.projection.name_to_use()}__{feature.name}" + long_name = self._get_projected_feature_name(feature.name) if ( short_name in df_with_transformed_features.columns and full_feature_names @@ -392,7 +426,133 @@ def get_transformed_features_df( df_with_features.drop(columns=columns_to_cleanup, inplace=True) return df_with_transformed_features.rename(columns=rename_columns) + def get_transformed_features_dict( + self, + feature_dict: Dict[str, Any], # type: ignore + ) -> Dict[str, Any]: + + # we need a mapping from full feature name to short and back to do a renaming + # The simplest thing to do is to make the full reference, copy the columns with the short reference + # and rerun + columns_to_cleanup: List[str] = [] + for source_fv_projection in self.source_feature_view_projections.values(): + for feature in source_fv_projection.features: + full_feature_ref = f"{source_fv_projection.name}__{feature.name}" + if full_feature_ref in feature_dict.keys(): + # Make sure the partial feature name is always present + feature_dict[feature.name] = feature_dict[full_feature_ref] + columns_to_cleanup.append(str(feature.name)) + elif feature.name in feature_dict.keys(): + # Make sure the full feature name is always present + feature_dict[full_feature_ref] = feature_dict[feature.name] + columns_to_cleanup.append(str(full_feature_ref)) + + output_dict: Dict[str, Any] = self.feature_transformation.transform( + feature_dict + ) + for feature_name in columns_to_cleanup: + del output_dict[feature_name] + return output_dict + + def get_transformed_features( + self, + features: Union[Dict[str, Any], pd.DataFrame], + full_feature_names: bool = False, + ) -> Union[Dict[str, Any], pd.DataFrame]: + # TODO: classic inheritance pattern....maybe fix this + if self.mode == "python" and isinstance(features, Dict): + # note full_feature_names is not needed for the dictionary + return 
self.get_transformed_features_dict( + feature_dict=features, + ) + elif self.mode == "pandas" and isinstance(features, pd.DataFrame): + return self.get_transformed_features_df( + df_with_features=features, + full_feature_names=full_feature_names, + ) + else: + raise Exception( + f'Invalid OnDemandFeatureMode: {self.mode}. Expected one of "pandas" or "python".' + ) + def infer_features(self) -> None: + if self.mode in {"pandas", "substrait"}: + self._infer_features_df() + elif self.mode == "python": + self._infer_features_dict() + else: + raise Exception( + f'Invalid OnDemandFeatureMode: {self.mode}. Expected one of "pandas" or "python".' + ) + + def _infer_features_dict(self): + """ + Infers the set of features associated to this feature view from the input source. + + Raises: + RegistryInferenceFailure: The set of features could not be inferred. + """ + rand_dict_value: Dict[str, Any] = { + "float": [1.0], + "int": [1], + "str": ["hello world"], + "bytes": [str.encode("hello world")], + "bool": [True], + "datetime64[ns]": [datetime.utcnow()], + } + + feature_dict = {} + for feature_view_projection in self.source_feature_view_projections.values(): + for feature in feature_view_projection.features: + dtype = feast_value_type_to_pandas_type(feature.dtype.to_value_type()) + feature_dict[f"{feature_view_projection.name}__{feature.name}"] = ( + rand_dict_value[dtype] if dtype in rand_dict_value else [None] + ) + feature_dict[f"{feature.name}"] = ( + rand_dict_value[dtype] if dtype in rand_dict_value else [None] + ) + for request_data in self.source_request_sources.values(): + for field in request_data.schema: + dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) + feature_dict[f"{field.name}"] = ( + rand_dict_value[dtype] if dtype in rand_dict_value else [None] + ) + + output_dict: Dict[str, List[Any]] = self.feature_transformation.transform( + feature_dict + ) + inferred_features = [] + for f, dt in output_dict.items(): + inferred_features.append( + 
Field( + name=f, + dtype=from_value_type( + python_type_to_feast_value_type( + f, type_name=type(dt[0]).__name__ + ) + ), + ) + ) + + if self.features: + missing_features = [] + for specified_features in self.features: + if specified_features not in inferred_features: + missing_features.append(specified_features) + if missing_features: + raise SpecifiedFeaturesNotPresentError( + missing_features, inferred_features, self.name + ) + else: + self.features = inferred_features + + if not self.features: + raise RegistryInferenceFailure( + "OnDemandFeatureView", + f"Could not infer Features for the feature view '{self.name}'.", + ) + + def _infer_features_df(self) -> None: """ Infers the set of features associated to this feature view from the input source. @@ -422,6 +582,7 @@ def infer_features(self) -> None: dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) sample_val = rand_df_value[dtype] if dtype in rand_df_value else None df[f"{field.name}"] = pd.Series(sample_val, dtype=dtype) + output_df: pd.DataFrame = self.feature_transformation.transform(df) inferred_features = [] for f, dt in zip(output_df.columns, output_df.dtypes): @@ -478,6 +639,7 @@ def on_demand_feature_view( FeatureViewProjection, ] ], + mode: str = "pandas", description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", @@ -491,6 +653,7 @@ def on_demand_feature_view( sources: A map from input source names to the actual input sources, which may be feature views, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. + mode: The mode of execution (e.g,. Pandas or Python Native) description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. 
owner (optional): The owner of the on demand feature view, typically the email @@ -504,6 +667,7 @@ def mainify(obj) -> None: obj.__module__ = "__main__" def decorator(user_function): + return_annotation = inspect.signature(user_function).return_annotation if ( return_annotation @@ -544,13 +708,27 @@ def decorator(user_function): else: udf_string = dill.source.getsource(user_function) mainify(user_function) - transformation = PandasTransformation(user_function, udf_string) + if mode == "pandas": + if return_annotation not in (inspect._empty, pd.DataFrame): + raise TypeError( + f"return signature for {user_function} is {return_annotation} but should be pd.DataFrame" + ) + transformation = PandasTransformation(user_function, udf_string) + elif mode == "python": + if return_annotation not in (inspect._empty, Dict[str, Any]): + raise TypeError( + f"return signature for {user_function} is {return_annotation} but should be Dict[str, Any]" + ) + transformation = PythonTransformation(user_function, udf_string) + elif mode == "substrait": + pass on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, sources=sources, schema=schema, feature_transformation=transformation, + mode=mode, description=description, tags=tags, owner=owner, @@ -578,3 +756,8 @@ def feature_view_to_batch_feature_view(fv: FeatureView) -> BatchFeatureView: bfv.features = copy.copy(fv.features) bfv.entities = copy.copy(fv.entities) return bfv + + +def _empty_odfv_udf_fn(x: Any) -> Any: + # just an identity mapping, otherwise we risk tripping some downstream tests + return x diff --git a/sdk/python/feast/transformation/pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py index 76f17e2106..1838a882f2 100644 --- a/sdk/python/feast/transformation/pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -11,7 +11,7 @@ class PandasTransformation: def __init__(self, udf: FunctionType, udf_string: str = ""): """ - Creates an 
OnDemandPandasTransformation object. + Creates an PandasTransformation object. Args: udf: The user defined transformation function, which must take pandas @@ -21,8 +21,17 @@ def __init__(self, udf: FunctionType, udf_string: str = ""): self.udf = udf self.udf_string = udf_string - def transform(self, df: pd.DataFrame) -> pd.DataFrame: - return self.udf.__call__(df) + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + if not isinstance(input_df, pd.DataFrame): + raise TypeError( + f"input_df should be type pd.DataFrame but got {type(input_df).__name__}" + ) + output_df = self.udf.__call__(input_df) + if not isinstance(output_df, pd.DataFrame): + raise TypeError( + f"output_df should be type pd.DataFrame but got {type(output_df).__name__}" + ) + return output_df def __eq__(self, other): if not isinstance(other, PandasTransformation): diff --git a/sdk/python/feast/transformation/python_transformation.py b/sdk/python/feast/transformation/python_transformation.py new file mode 100644 index 0000000000..9519f23c05 --- /dev/null +++ b/sdk/python/feast/transformation/python_transformation.py @@ -0,0 +1,65 @@ +from types import FunctionType +from typing import Dict + +import dill + +from feast.protos.feast.core.Transformation_pb2 import ( + UserDefinedFunctionV2 as UserDefinedFunctionProto, +) + + +class PythonTransformation: + def __init__(self, udf: FunctionType, udf_string: str = ""): + """ + Creates an PythonTransformation object. + Args: + udf: The user defined transformation function, which must take pandas + dataframes as inputs. 
+ udf_string: The source code version of the udf (for diffing and displaying in Web UI) + """ + self.udf = udf + self.udf_string = udf_string + + def transform(self, input_dict: Dict) -> Dict: + if not isinstance(input_dict, Dict): + raise TypeError( + f"input_dict should be type Dict[str, Any] but got {type(input_dict).__name__}" + ) + # Ensuring that the inputs are included as well + output_dict = self.udf.__call__(input_dict) + if not isinstance(output_dict, Dict): + raise TypeError( + f"output_dict should be type Dict[str, Any] but got {type(output_dict).__name__}" + ) + return {**input_dict, **output_dict} + + def __eq__(self, other): + if not isinstance(other, PythonTransformation): + raise TypeError( + "Comparisons should only involve PythonTransformation class objects." + ) + + if not super().__eq__(other): + return False + + if ( + self.udf_string != other.udf_string + or self.udf.__code__.co_code != other.udf.__code__.co_code + ): + return False + + return True + + def to_proto(self) -> UserDefinedFunctionProto: + return UserDefinedFunctionProto( + name=self.udf.__name__, + body=dill.dumps(self.udf, recurse=True), + body_text=self.udf_string, + ) + + @classmethod + def from_proto(cls, user_defined_function_proto: UserDefinedFunctionProto): + return PythonTransformation( + udf=dill.loads(user_defined_function_proto.body), + udf_string=user_defined_function_proto.body_text, + ) diff --git a/sdk/python/feast/transformation_server.py b/sdk/python/feast/transformation_server.py index 83f4af749e..34fe3eac76 100644 --- a/sdk/python/feast/transformation_server.py +++ b/sdk/python/feast/transformation_server.py @@ -47,6 +47,11 @@ def TransformFeatures(self, request, context): df = pa.ipc.open_file(request.transformation_input.arrow_value).read_pandas() + if odfv.mode != "pandas": + raise Exception( + f'OnDemandFeatureView mode "{odfv.mode}" not supported by TransformationServer.' 
+ ) + result_df = odfv.get_transformed_features_df(df, True) result_arrow = pa.Table.from_pandas(result_df) sink = pa.BufferOutputStream() diff --git a/sdk/python/tests/unit/infra/test_inference_unit_tests.py b/sdk/python/tests/unit/infra/test_inference_unit_tests.py index a108d397bd..be97a838bd 100644 --- a/sdk/python/tests/unit/infra/test_inference_unit_tests.py +++ b/sdk/python/tests/unit/infra/test_inference_unit_tests.py @@ -1,3 +1,5 @@ +from typing import Any, Dict + import pandas as pd import pytest @@ -51,7 +53,7 @@ def test_infer_datasource_names_dwh(): data_source = dwh_class(query="test_query") -def test_on_demand_features_type_inference(): +def test_on_demand_features_valid_type_inference(): # Create Feature Views date_request = RequestSource( name="date_request", @@ -73,6 +75,31 @@ def test_view(features_df: pd.DataFrame) -> pd.DataFrame: test_view.infer_features() + @on_demand_feature_view( + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="object_output", dtype=String), + ], + mode="python", + ) + def python_native_test_view(input_dict: Dict[str, Any]) -> Dict[str, Any]: + output_dict: Dict[str, Any] = { + "output": input_dict["some_date"], + "object_output": str(input_dict["some_date"]), + } + return output_dict + + python_native_test_view.infer_features() + + +def test_on_demand_features_invalid_type_inference(): + # Create Feature Views + date_request = RequestSource( + name="date_request", + schema=[Field(name="some_date", dtype=UnixTimestamp)], + ) + @on_demand_feature_view( sources=[date_request], schema=[ @@ -96,13 +123,49 @@ def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: ], sources=[date_request], ) - def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: + def view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: data = pd.DataFrame() data["output"] = features_df["some_date"] return data with 
pytest.raises(SpecifiedFeaturesNotPresentError): - test_view_with_missing_feature.infer_features() + view_with_missing_feature.infer_features() + + with pytest.raises(TypeError): + + @on_demand_feature_view( + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="object_output", dtype=String), + ], + mode="pandas", + ) + def python_native_test_invalid_pandas_view( + input_dict: Dict[str, Any] + ) -> Dict[str, Any]: + output_dict: Dict[str, Any] = { + "output": input_dict["some_date"], + "object_output": str(input_dict["some_date"]), + } + return output_dict + + with pytest.raises(TypeError): + + @on_demand_feature_view( + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="object_output", dtype=String), + ], + mode="python", + ) + def python_native_test_invalid_dict_view( + features_df: pd.DataFrame, + ) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + return data def test_datasource_inference(): diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index d561bd8e84..02e013e775 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -12,13 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Any, Dict, List + import pandas as pd import pytest from feast.feature_view import FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource -from feast.on_demand_feature_view import OnDemandFeatureView, PandasTransformation +from feast.on_demand_feature_view import ( + OnDemandFeatureView, + PandasTransformation, + PythonTransformation, +) from feast.types import Float32 @@ -32,10 +38,18 @@ def udf1(features_df: pd.DataFrame) -> pd.DataFrame: def udf2(features_df: pd.DataFrame) -> pd.DataFrame: df = pd.DataFrame() df["output1"] = features_df["feature1"] + 100 - df["output2"] = features_df["feature2"] + 100 + df["output2"] = features_df["feature2"] + 101 return df +def python_native_udf(features_dict: Dict[str, List[Any]]) -> Dict[str, Any]: + output_dict: Dict[str, List[Any]] = { + "output1": features_dict["feature1"] + 100, + "output2": features_dict["feature2"] + 101, + } + return output_dict + + @pytest.mark.filterwarnings("ignore:udf and udf_string parameters are deprecated") def test_hash(): file_source = FileSource(name="my-file-source", path="test.parquet") @@ -101,8 +115,9 @@ def test_hash(): Field(name="output1", dtype=Float32), Field(name="output2", dtype=Float32), ], - udf=udf2, - udf_string="udf2 source code", + feature_transformation=PandasTransformation( + udf=udf2, udf_string="udf2 source code" + ), description="test", ) @@ -128,6 +143,75 @@ def test_hash(): ) +def test_python_native_transformation_mode(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + sources = [feature_view] + + on_demand_feature_view_python_native = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", 
dtype=Float32), + ], + feature_transformation=PythonTransformation( + udf=python_native_udf, udf_string="python native udf source code" + ), + description="test", + mode="python", + ) + + on_demand_feature_view_python_native_err = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + feature_transformation=PandasTransformation( + udf=python_native_udf, udf_string="python native udf source code" + ), + description="test", + mode="python", + ) + + assert ( + on_demand_feature_view_python_native.feature_transformation + == PythonTransformation(python_native_udf, "python native udf source code") + ) + + with pytest.raises(TypeError): + assert ( + on_demand_feature_view_python_native_err.feature_transformation + == PythonTransformation(python_native_udf, "python native udf source code") + ) + + with pytest.raises(TypeError): + # This should fail + on_demand_feature_view_python_native_err.feature_transformation.transform( + { + "feature1": 0, + "feature2": 1, + } + ) + + assert on_demand_feature_view_python_native.get_transformed_features( + { + "feature1": 0, + "feature2": 1, + } + ) == {"feature1": 0, "feature2": 1, "output1": 100, "output2": 102} + + @pytest.mark.filterwarnings("ignore:udf and udf_string parameters are deprecated") def test_from_proto_backwards_compatible_udf(): file_source = FileSource(name="my-file-source", path="test.parquet") diff --git a/sdk/python/tests/unit/test_on_demand_substrait_transformation.py b/sdk/python/tests/unit/test_on_demand_substrait_transformation.py index c9d30c5b7a..378aa7ce3b 100644 --- a/sdk/python/tests/unit/test_on_demand_substrait_transformation.py +++ b/sdk/python/tests/unit/test_on_demand_substrait_transformation.py @@ -71,6 +71,7 @@ def pandas_view(inputs: pd.DataFrame) -> pd.DataFrame: @on_demand_feature_view( sources=[driver_stats_fv[["conv_rate", "acc_rate"]]], 
schema=[Field(name="conv_rate_plus_acc_substrait", dtype=Float64)], + mode="substrait", ) def substrait_view(inputs: Table) -> Table: return inputs.select( From 6bb4c73b49934706002f9346c2260ab4261e4638 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Fri, 29 Mar 2024 03:20:27 +0400 Subject: [PATCH 092/122] fix: Add missing __init__.py to embedded_go (#4051) add __init__.py to embedded_go Signed-off-by: tokoko --- sdk/python/feast/embedded_go/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdk/python/feast/embedded_go/__init__.py diff --git a/sdk/python/feast/embedded_go/__init__.py b/sdk/python/feast/embedded_go/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From df0525355c32bbc40f890213edfa36512dd5bf55 Mon Sep 17 00:00:00 2001 From: Alex Vinnik <33845028+alex-vinnik-sp@users.noreply.github.com> Date: Fri, 29 Mar 2024 07:49:19 -0500 Subject: [PATCH 093/122] fix: Adding missing init files in materialization modules (#4052) * materialization-init Adding missing init files in materialization modules Signed-off-by: Alex Vinnik * materialization-init Add more missiing __init__.py Signed-off-by: Alex Vinnik --------- Signed-off-by: Alex Vinnik --- sdk/python/feast/infra/contrib/__init__.py | 0 sdk/python/feast/infra/feature_servers/__init__.py | 0 sdk/python/feast/infra/materialization/__init__.py | 0 sdk/python/feast/infra/materialization/contrib/__init__.py | 0 sdk/python/feast/infra/registry/contrib/__init__.py | 0 sdk/python/feast/infra/registry/contrib/postgres/__init__.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdk/python/feast/infra/contrib/__init__.py create mode 100644 sdk/python/feast/infra/feature_servers/__init__.py create mode 100644 sdk/python/feast/infra/materialization/__init__.py create mode 100644 sdk/python/feast/infra/materialization/contrib/__init__.py create mode 100644 sdk/python/feast/infra/registry/contrib/__init__.py create mode 100644 
sdk/python/feast/infra/registry/contrib/postgres/__init__.py diff --git a/sdk/python/feast/infra/contrib/__init__.py b/sdk/python/feast/infra/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/feature_servers/__init__.py b/sdk/python/feast/infra/feature_servers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/materialization/__init__.py b/sdk/python/feast/infra/materialization/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/materialization/contrib/__init__.py b/sdk/python/feast/infra/materialization/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/registry/contrib/__init__.py b/sdk/python/feast/infra/registry/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/registry/contrib/postgres/__init__.py b/sdk/python/feast/infra/registry/contrib/postgres/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From afd52b8803d7660a90f382d2c1ad7705608c861b Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Fri, 29 Mar 2024 10:48:17 -0400 Subject: [PATCH 094/122] feat: Update odfv test (#4054) feat: update odfv test Signed-off-by: Francisco Javier Arceo --- sdk/python/tests/unit/test_on_demand_feature_view.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index 02e013e775..cf4afa9422 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -42,7 +42,7 @@ def udf2(features_df: pd.DataFrame) -> pd.DataFrame: return df -def python_native_udf(features_dict: Dict[str, List[Any]]) -> Dict[str, Any]: +def python_native_udf(features_dict: Dict[str, Any]) -> Dict[str, Any]: output_dict: Dict[str, List[Any]] = { 
"output1": features_dict["feature1"] + 100, "output2": features_dict["feature2"] + 101, From 3980e0c9a762a6ec3bcee5a0e9cdf532994bb1c9 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 30 Mar 2024 14:32:34 +0400 Subject: [PATCH 095/122] feat: Rewrite ibis point-in-time-join w/o feast abstractions (#4023) * feat: refactor ibis point-in-time-join Signed-off-by: tokoko * fix formatting, linting Signed-off-by: tokoko --------- Signed-off-by: tokoko --- .../contrib/ibis_offline_store/ibis.py | 248 ++++++++++-------- .../requirements/py3.10-ci-requirements.txt | 13 +- .../requirements/py3.9-ci-requirements.txt | 13 +- .../unit/infra/offline_stores/test_ibis.py | 138 ++++++++++ setup.py | 1 + 5 files changed, 295 insertions(+), 118 deletions(-) create mode 100644 sdk/python/tests/unit/infra/offline_stores/test_ibis.py diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py index 72e0d970c6..8787d70158 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py +++ b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py @@ -72,112 +72,6 @@ def _get_entity_df_event_timestamp_range( return entity_df_event_timestamp_range - @staticmethod - def _get_historical_features_one( - feature_view: FeatureView, - entity_table: Table, - feature_refs: List[str], - full_feature_names: bool, - timestamp_range: Tuple, - acc_table: Table, - event_timestamp_col: str, - ) -> Table: - fv_table: Table = ibis.read_parquet(feature_view.batch_source.name) - - for old_name, new_name in feature_view.batch_source.field_mapping.items(): - if old_name in fv_table.columns: - fv_table = fv_table.rename({new_name: old_name}) - - timestamp_field = feature_view.batch_source.timestamp_field - - # TODO mutate only if tz-naive - fv_table = fv_table.mutate( - **{ - timestamp_field: fv_table[timestamp_field].cast( - dt.Timestamp(timezone="UTC") - ) - } - ) 
- - full_name_prefix = feature_view.projection.name_alias or feature_view.name - - feature_refs = [ - fr.split(":")[1] - for fr in feature_refs - if fr.startswith(f"{full_name_prefix}:") - ] - - timestamp_range_start_minus_ttl = ( - timestamp_range[0] - feature_view.ttl - if feature_view.ttl and feature_view.ttl > timedelta(0, 0, 0, 0, 0, 0, 0) - else timestamp_range[0] - ) - - timestamp_range_start_minus_ttl = ibis.literal( - timestamp_range_start_minus_ttl.strftime("%Y-%m-%d %H:%M:%S.%f") - ).cast(dt.Timestamp(timezone="UTC")) - - timestamp_range_end = ibis.literal( - timestamp_range[1].strftime("%Y-%m-%d %H:%M:%S.%f") - ).cast(dt.Timestamp(timezone="UTC")) - - fv_table = fv_table.filter( - ibis.and_( - fv_table[timestamp_field] <= timestamp_range_end, - fv_table[timestamp_field] >= timestamp_range_start_minus_ttl, - ) - ) - - # join_key_map = feature_view.projection.join_key_map or {e.name: e.name for e in feature_view.entity_columns} - # predicates = [fv_table[k] == entity_table[v] for k, v in join_key_map.items()] - - if feature_view.projection.join_key_map: - predicates = [ - fv_table[k] == entity_table[v] - for k, v in feature_view.projection.join_key_map.items() - ] - else: - predicates = [ - fv_table[e.name] == entity_table[e.name] - for e in feature_view.entity_columns - ] - - predicates.append( - fv_table[timestamp_field] <= entity_table[event_timestamp_col] - ) - - fv_table = fv_table.inner_join( - entity_table, predicates, lname="", rname="{name}_y" - ) - - fv_table = ( - fv_table.group_by(by="entity_row_id") - .order_by(ibis.desc(fv_table[timestamp_field])) - .mutate(rn=ibis.row_number()) - ) - - fv_table = fv_table.filter(fv_table["rn"] == ibis.literal(0)) - - select_cols = ["entity_row_id"] - select_cols.extend(feature_refs) - fv_table = fv_table.select(select_cols) - - if full_feature_names: - fv_table = fv_table.rename( - {f"{full_name_prefix}__{feature}": feature for feature in feature_refs} - ) - - acc_table = acc_table.left_join( - fv_table, - 
predicates=[fv_table.entity_row_id == acc_table.entity_row_id], - lname="", - rname="{name}_yyyy", - ) - - acc_table = acc_table.drop(s.endswith("_yyyy")) - - return acc_table - @staticmethod def _to_utc(entity_df: pd.DataFrame, event_timestamp_col): entity_df_event_timestamp = entity_df.loc[ @@ -228,9 +122,11 @@ def get_historical_features( entity_schema=entity_schema, ) + # TODO get range with ibis timestamp_range = IbisOfflineStore._get_entity_df_event_timestamp_range( entity_df, event_timestamp_col ) + entity_df = IbisOfflineStore._to_utc(entity_df, event_timestamp_col) entity_table = ibis.memtable(entity_df) @@ -238,20 +134,61 @@ def get_historical_features( entity_table, feature_views, event_timestamp_col ) - res: Table = entity_table + def read_fv(feature_view, feature_refs, full_feature_names): + fv_table: Table = ibis.read_parquet(feature_view.batch_source.name) - for fv in feature_views: - res = IbisOfflineStore._get_historical_features_one( - fv, - entity_table, + for old_name, new_name in feature_view.batch_source.field_mapping.items(): + if old_name in fv_table.columns: + fv_table = fv_table.rename({new_name: old_name}) + + timestamp_field = feature_view.batch_source.timestamp_field + + # TODO mutate only if tz-naive + fv_table = fv_table.mutate( + **{ + timestamp_field: fv_table[timestamp_field].cast( + dt.Timestamp(timezone="UTC") + ) + } + ) + + full_name_prefix = feature_view.projection.name_alias or feature_view.name + + feature_refs = [ + fr.split(":")[1] + for fr in feature_refs + if fr.startswith(f"{full_name_prefix}:") + ] + + if full_feature_names: + fv_table = fv_table.rename( + { + f"{full_name_prefix}__{feature}": feature + for feature in feature_refs + } + ) + + feature_refs = [ + f"{full_name_prefix}__{feature}" for feature in feature_refs + ] + + return ( + fv_table, + feature_view.batch_source.timestamp_field, + feature_view.projection.join_key_map + or {e.name: e.name for e in feature_view.entity_columns}, feature_refs, - 
full_feature_names, - timestamp_range, - res, - event_timestamp_col, + feature_view.ttl, ) - res = res.drop("entity_row_id") + res = point_in_time_join( + entity_table=entity_table, + feature_tables=[ + read_fv(feature_view, feature_refs, full_feature_names) + for feature_view in feature_views + ], + event_timestamp_col=event_timestamp_col, + ) return IbisRetrievalJob( res, @@ -285,6 +222,10 @@ def pull_all_from_table_or_query( table = table.select(*fields) + # TODO get rid of this fix + if "__log_date" in table.columns: + table = table.drop("__log_date") + table = table.filter( ibis.and_( table[timestamp_field] >= ibis.literal(start_date), @@ -320,6 +261,7 @@ def write_logged_features( else: kwargs = {} + # TODO always write to directory table.to_parquet( f"{destination.path}/{uuid.uuid4().hex}-{{i}}.parquet", **kwargs ) @@ -405,3 +347,77 @@ def persist( @property def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata + + +def point_in_time_join( + entity_table: Table, + feature_tables: List[Tuple[Table, str, Dict[str, str], List[str], timedelta]], + event_timestamp_col="event_timestamp", +): + # TODO handle ttl + all_entities = [event_timestamp_col] + for feature_table, timestamp_field, join_key_map, _, _ in feature_tables: + all_entities.extend(join_key_map.values()) + + r = ibis.literal("") + + for e in set(all_entities): + r = r.concat(entity_table[e].cast("string")) # type: ignore + + entity_table = entity_table.mutate(entity_row_id=r) + + acc_table = entity_table + + for ( + feature_table, + timestamp_field, + join_key_map, + feature_refs, + ttl, + ) in feature_tables: + predicates = [ + feature_table[k] == entity_table[v] for k, v in join_key_map.items() + ] + + predicates.append( + feature_table[timestamp_field] <= entity_table[event_timestamp_col], + ) + + if ttl: + predicates.append( + feature_table[timestamp_field] + >= entity_table[event_timestamp_col] - ibis.literal(ttl) + ) + + feature_table = feature_table.inner_join( + 
entity_table, predicates, lname="", rname="{name}_y" + ) + + feature_table = feature_table.drop(s.endswith("_y")) + + feature_table = ( + feature_table.group_by(by="entity_row_id") + .order_by(ibis.desc(feature_table[timestamp_field])) + .mutate(rn=ibis.row_number()) + ) + + feature_table = feature_table.filter( + feature_table["rn"] == ibis.literal(0) + ).drop("rn") + + select_cols = ["entity_row_id"] + select_cols.extend(feature_refs) + feature_table = feature_table.select(select_cols) + + acc_table = acc_table.left_join( + feature_table, + predicates=[feature_table.entity_row_id == acc_table.entity_row_id], + lname="", + rname="{name}_yyyy", + ) + + acc_table = acc_table.drop(s.endswith("_yyyy")) + + acc_table = acc_table.drop("entity_row_id") + + return acc_table diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 4b1e26e1f3..ac1994da37 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -164,6 +164,12 @@ docker==7.0.0 # testcontainers docutils==0.19 # via sphinx +duckdb==0.10.1 + # via + # duckdb-engine + # ibis-framework +duckdb-engine==0.11.2 + # via ibis-framework entrypoints==0.4 # via altair exceptiongroup==1.2.0 @@ -310,7 +316,7 @@ httpx==0.27.0 # via # feast (setup.py) # jupyterlab -ibis-framework==8.0.0 +ibis-framework[duckdb]==8.0.0 # via # feast (setup.py) # ibis-substrait @@ -848,8 +854,13 @@ sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 # via + # duckdb-engine # feast (setup.py) + # ibis-framework # sqlalchemy + # sqlalchemy-views +sqlalchemy-views==0.3.2 + # via ibis-framework sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 99dee08c05..367b5dc050 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ 
b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -164,6 +164,12 @@ docker==7.0.0 # testcontainers docutils==0.19 # via sphinx +duckdb==0.10.1 + # via + # duckdb-engine + # ibis-framework +duckdb-engine==0.11.2 + # via ibis-framework entrypoints==0.4 # via altair exceptiongroup==1.2.0 @@ -310,7 +316,7 @@ httpx==0.27.0 # via # feast (setup.py) # jupyterlab -ibis-framework==8.0.0 +ibis-framework[duckdb]==8.0.0 # via # feast (setup.py) # ibis-substrait @@ -858,8 +864,13 @@ sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==1.4.52 # via + # duckdb-engine # feast (setup.py) + # ibis-framework # sqlalchemy + # sqlalchemy-views +sqlalchemy-views==0.3.2 + # via ibis-framework sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sqlglot==20.11.0 diff --git a/sdk/python/tests/unit/infra/offline_stores/test_ibis.py b/sdk/python/tests/unit/infra/offline_stores/test_ibis.py new file mode 100644 index 0000000000..5f105e2af7 --- /dev/null +++ b/sdk/python/tests/unit/infra/offline_stores/test_ibis.py @@ -0,0 +1,138 @@ +from datetime import datetime, timedelta +from typing import Dict, List, Tuple + +import ibis +import pyarrow as pa + +from feast.infra.offline_stores.contrib.ibis_offline_store.ibis import ( + point_in_time_join, +) + + +def pa_datetime(year, month, day): + return pa.scalar(datetime(year, month, day), type=pa.timestamp("s", tz="UTC")) + + +def customer_table(): + return pa.Table.from_arrays( + arrays=[ + pa.array([1, 1, 2]), + pa.array( + [ + pa_datetime(2024, 1, 1), + pa_datetime(2024, 1, 2), + pa_datetime(2024, 1, 1), + ] + ), + ], + names=["customer_id", "event_timestamp"], + ) + + +def features_table_1(): + return pa.Table.from_arrays( + arrays=[ + pa.array([1, 1, 1, 2]), + pa.array( + [ + pa_datetime(2023, 12, 31), + pa_datetime(2024, 1, 2), + pa_datetime(2024, 1, 3), + pa_datetime(2023, 1, 3), + ] + ), + pa.array([11, 22, 33, 22]), + ], + names=["customer_id", "event_timestamp", "feature1"], + ) + + +def point_in_time_join_brute( + 
entity_table: pa.Table, + feature_tables: List[Tuple[pa.Table, str, Dict[str, str], List[str], timedelta]], + event_timestamp_col="event_timestamp", +): + ret_fields = [entity_table.schema.field(n) for n in entity_table.schema.names] + + from operator import itemgetter + + ret = entity_table.to_pydict() + batch_dict = entity_table.to_pydict() + + for i, row_timestmap in enumerate(batch_dict[event_timestamp_col]): + for ( + feature_table, + timestamp_key, + join_key_map, + feature_refs, + ttl, + ) in feature_tables: + if i == 0: + ret_fields.extend( + [ + feature_table.schema.field(f) + for f in feature_table.schema.names + if f not in join_key_map.values() and f != timestamp_key + ] + ) + + def check_equality(ft_dict, batch_dict, x, y): + return all( + [ft_dict[k][x] == batch_dict[v][y] for k, v in join_key_map.items()] + ) + + ft_dict = feature_table.to_pydict() + found_matches = [ + (j, ft_dict[timestamp_key][j]) + for j in range(entity_table.num_rows) + if check_equality(ft_dict, batch_dict, j, i) + and ft_dict[timestamp_key][j] <= row_timestmap + and ft_dict[timestamp_key][j] >= row_timestmap - ttl + ] + + index_found = ( + max(found_matches, key=itemgetter(1))[0] if found_matches else None + ) + for col in ft_dict.keys(): + if col not in feature_refs: + continue + + if col not in ret: + ret[col] = [] + + if index_found is not None: + ret[col].append(ft_dict[col][index_found]) + else: + ret[col].append(None) + + return pa.Table.from_pydict(ret, schema=pa.schema(ret_fields)) + + +def test_point_in_time_join(): + expected = point_in_time_join_brute( + customer_table(), + feature_tables=[ + ( + features_table_1(), + "event_timestamp", + {"customer_id": "customer_id"}, + ["feature1"], + timedelta(days=10), + ) + ], + ) + + actual = point_in_time_join( + ibis.memtable(customer_table()), + feature_tables=[ + ( + ibis.memtable(features_table_1()), + "event_timestamp", + {"customer_id": "customer_id"}, + ["feature1"], + timedelta(days=10), + ) + ], + ).to_pyarrow() + + 
assert actual.equals(expected) diff --git a/setup.py b/setup.py index f142da7ec3..2d7bf63778 100644 --- a/setup.py +++ b/setup.py @@ -212,6 +212,7 @@ + HAZELCAST_REQUIRED + IBIS_REQUIRED + GRPCIO_REQUIRED + + DUCKDB_REQUIRED ) DOCS_REQUIRED = CI_REQUIRED From b9aabbd35e27b26fb3af414da604062d6c8d17d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ferenc=20Szab=C3=B3?= Date: Sat, 30 Mar 2024 11:34:20 +0100 Subject: [PATCH 096/122] fix: Azure blob storage support in Java feature server (#2319) (#4014) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add azure blob storage support in java feature server - Fix S3 integration test to work without a real AWS account - Add GCS mock to integration tests to be able to run them without a real google cloud account - Adding dependency management in maven for libraries with older incompatible versions as transitive dependencies Signed-off-by: Ferenc Szabó --- java/CONTRIBUTING.md | 4 +- java/pom.xml | 65 +++++++++-- java/serving/.gitignore | 5 +- java/serving/pom.xml | 27 +++-- .../serving/registry/AzureRegistryFile.java | 57 ++++++++++ .../service/config/ApplicationProperties.java | 9 ++ .../service/config/RegistryConfigModule.java | 23 +++- .../it/ServingRedisAzureRegistryIT.java | 105 ++++++++++++++++++ .../serving/it/ServingRedisGSRegistryIT.java | 74 +++++++----- .../serving/it/ServingRedisS3RegistryIT.java | 18 ++- 10 files changed, 335 insertions(+), 52 deletions(-) create mode 100644 java/serving/src/main/java/feast/serving/registry/AzureRegistryFile.java create mode 100644 java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java diff --git a/java/CONTRIBUTING.md b/java/CONTRIBUTING.md index 65d43d0de5..6d53c7b5c2 100644 --- a/java/CONTRIBUTING.md +++ b/java/CONTRIBUTING.md @@ -50,7 +50,7 @@ Automatically format the code to conform the style guide by: ```sh # formats all code in the feast-java repository -mvn spotless:apply +make format-java ``` > If you're using 
IntelliJ, you can import these [code style settings](https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml) @@ -66,7 +66,7 @@ Run all Unit tests: make test-java ``` -Run all Integration tests (note: this also runs GCS + S3 based tests which should fail): +Run all Integration tests: ``` make test-java-integration ``` diff --git a/java/pom.xml b/java/pom.xml index 59c6733784..ccb3312596 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -68,6 +68,8 @@ 0.21.0 1.6.6 30.1-jre + 3.4.34 + 4.1.101.Final ${javax.validation.version} + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.version} + + + + io.netty + netty-common + ${netty.version} + + + io.netty + netty-buffer + ${netty.version} + + + io.netty + netty-handler + ${netty.version} + + + io.netty + netty-transport + ${netty.version} + + + + io.projectreactor + reactor-core + ${reactor.version} + + org.junit.platform junit-platform-engine @@ -246,7 +291,7 @@ - ${license.content} + ${license.content} 1.7 @@ -264,15 +309,15 @@ - - - spotless-check - process-test-classes - - check - - - + + + spotless-check + process-test-classes + + check + + + org.apache.maven.plugins diff --git a/java/serving/.gitignore b/java/serving/.gitignore index 6c6b6d8d8f..750b7f498b 100644 --- a/java/serving/.gitignore +++ b/java/serving/.gitignore @@ -34,4 +34,7 @@ feast-serving.jar /.nb-gradle/ ## Feast Temporary Files ## -/temp/ \ No newline at end of file +/temp/ + +## Generated test data ## +**/*.parquet \ No newline at end of file diff --git a/java/serving/pom.xml b/java/serving/pom.xml index 19e54e1362..6929d65d93 100644 --- a/java/serving/pom.xml +++ b/java/serving/pom.xml @@ -16,8 +16,8 @@ ~ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 @@ 
-121,6 +121,19 @@ 5.0.1 + + + + com.azure + azure-storage-blob + 12.25.2 + + + com.azure + azure-identity + 1.11.3 + + org.slf4j @@ -356,11 +369,11 @@ 2.7.4 test - - io.lettuce - lettuce-core - 6.0.2.RELEASE - + + io.lettuce + lettuce-core + 6.0.2.RELEASE + org.apache.commons commons-lang3 diff --git a/java/serving/src/main/java/feast/serving/registry/AzureRegistryFile.java b/java/serving/src/main/java/feast/serving/registry/AzureRegistryFile.java new file mode 100644 index 0000000000..72f6d476d5 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/AzureRegistryFile.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.registry; + +import com.azure.storage.blob.BlobClient; +import com.azure.storage.blob.BlobServiceClient; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.RegistryProto; +import java.util.Objects; +import java.util.Optional; + +public class AzureRegistryFile implements RegistryFile { + private final BlobClient blobClient; + private String lastKnownETag; + + public AzureRegistryFile(BlobServiceClient blobServiceClient, String url) { + String[] split = url.replace("az://", "").split("/"); + String objectPath = String.join("/", java.util.Arrays.copyOfRange(split, 1, split.length)); + this.blobClient = blobServiceClient.getBlobContainerClient(split[0]).getBlobClient(objectPath); + } + + @Override + public RegistryProto.Registry getContent() { + try { + return RegistryProto.Registry.parseFrom(blobClient.downloadContent().toBytes()); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException( + String.format( + "Couldn't read remote registry: %s. 
Error: %s", + blobClient.getBlobUrl(), e.getMessage())); + } + } + + @Override + public Optional getContentIfModified() { + String eTag = blobClient.getProperties().getETag(); + if (Objects.equals(eTag, this.lastKnownETag)) { + return Optional.empty(); + } else this.lastKnownETag = eTag; + + return Optional.of(getContent()); + } +} diff --git a/java/serving/src/main/java/feast/serving/service/config/ApplicationProperties.java b/java/serving/src/main/java/feast/serving/service/config/ApplicationProperties.java index 7cef10e61a..91c5440cb7 100644 --- a/java/serving/src/main/java/feast/serving/service/config/ApplicationProperties.java +++ b/java/serving/src/main/java/feast/serving/service/config/ApplicationProperties.java @@ -95,6 +95,7 @@ public static class FeastProperties { private String gcpProject; private String awsRegion; private String transformationServiceEndpoint; + private String azureStorageAccount; public String getRegistry() { return registry; @@ -205,6 +206,14 @@ public String getTransformationServiceEndpoint() { public void setTransformationServiceEndpoint(String transformationServiceEndpoint) { this.transformationServiceEndpoint = transformationServiceEndpoint; } + + public String getAzureStorageAccount() { + return azureStorageAccount; + } + + public void setAzureStorageAccount(String azureStorageAccount) { + this.azureStorageAccount = azureStorageAccount; + } } /** Store configuration class for database that this Feast Serving uses. 
*/ diff --git a/java/serving/src/main/java/feast/serving/service/config/RegistryConfigModule.java b/java/serving/src/main/java/feast/serving/service/config/RegistryConfigModule.java index cfb4666f07..5ab951c71c 100644 --- a/java/serving/src/main/java/feast/serving/service/config/RegistryConfigModule.java +++ b/java/serving/src/main/java/feast/serving/service/config/RegistryConfigModule.java @@ -18,6 +18,9 @@ import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.azure.identity.DefaultAzureCredentialBuilder; +import com.azure.storage.blob.BlobServiceClient; +import com.azure.storage.blob.BlobServiceClientBuilder; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; import com.google.inject.AbstractModule; @@ -43,11 +46,27 @@ public AmazonS3 awsStorage(ApplicationProperties applicationProperties) { .build(); } + @Provides + public BlobServiceClient azureStorage(ApplicationProperties applicationProperties) { + + BlobServiceClient blobServiceClient = + new BlobServiceClientBuilder() + .endpoint( + String.format( + "https://%s.blob.core.windows.net", + applicationProperties.getFeast().getAzureStorageAccount())) + .credential(new DefaultAzureCredentialBuilder().build()) + .buildClient(); + + return blobServiceClient; + } + @Provides RegistryFile registryFile( ApplicationProperties applicationProperties, Provider storageProvider, - Provider amazonS3Provider) { + Provider amazonS3Provider, + Provider azureProvider) { String registryPath = applicationProperties.getFeast().getRegistry(); Optional scheme = Optional.ofNullable(URI.create(registryPath).getScheme()); @@ -57,6 +76,8 @@ RegistryFile registryFile( return new GSRegistryFile(storageProvider.get(), registryPath); case "s3": return new S3RegistryFile(amazonS3Provider.get(), registryPath); + case "az": + return new AzureRegistryFile(azureProvider.get(), registryPath); case "": case "file": return new 
LocalRegistryFile(registryPath); diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java new file mode 100644 index 0000000000..8ab658fc2a --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java @@ -0,0 +1,105 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.it; + +import com.azure.storage.blob.BlobClient; +import com.azure.storage.blob.BlobServiceClient; +import com.azure.storage.blob.BlobServiceClientBuilder; +import com.azure.storage.common.StorageSharedKeyCredential; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import feast.proto.core.RegistryProto; +import feast.serving.service.config.ApplicationProperties; +import java.io.ByteArrayInputStream; +import org.junit.jupiter.api.BeforeAll; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Container; + +public class ServingRedisAzureRegistryIT extends ServingBaseTests { + private static final String TEST_ACCOUNT_NAME = "devstoreaccount1"; + private static final String TEST_ACCOUNT_KEY = + "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="; + private static final int BLOB_STORAGE_PORT = 10000; + private static final String TEST_CONTAINER = "test-container"; + private static final StorageSharedKeyCredential CREDENTIAL = + new StorageSharedKeyCredential(TEST_ACCOUNT_NAME, TEST_ACCOUNT_KEY); + + @Container + static final GenericContainer azureBlobMock = + new GenericContainer<>("mcr.microsoft.com/azure-storage/azurite:latest") + .waitingFor(Wait.forLogMessage("Azurite Blob service successfully listens on.*", 1)) + .withExposedPorts(BLOB_STORAGE_PORT) + .withCommand("azurite-blob", "--blobHost", "0.0.0.0"); + + private static BlobServiceClient createClient() { + return new BlobServiceClientBuilder() + .endpoint( + String.format( + "http://localhost:%d/%s", + azureBlobMock.getMappedPort(BLOB_STORAGE_PORT), TEST_ACCOUNT_NAME)) + .credential(CREDENTIAL) + .buildClient(); + } + + private static void putToStorage(RegistryProto.Registry registry) { + BlobServiceClient client = createClient(); + BlobClient blobClient = + 
client.getBlobContainerClient(TEST_CONTAINER).getBlobClient("registry.db"); + + blobClient.upload(new ByteArrayInputStream(registry.toByteArray())); + } + + @BeforeAll + static void setUp() { + BlobServiceClient client = createClient(); + client.createBlobContainer(TEST_CONTAINER); + + putToStorage(registryProto); + } + + @Override + ApplicationProperties.FeastProperties createFeastProperties() { + final ApplicationProperties.FeastProperties feastProperties = + TestUtils.createBasicFeastProperties( + environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379)); + feastProperties.setRegistry(String.format("az://%s/registry.db", TEST_CONTAINER)); + + return feastProperties; + } + + @Override + void updateRegistryFile(RegistryProto.Registry registry) { + putToStorage(registry); + } + + @Override + AbstractModule registryConfig() { + return new AbstractModule() { + @Provides + public BlobServiceClient awsStorage() { + return new BlobServiceClientBuilder() + .endpoint( + String.format( + "http://localhost:%d/%s", + azureBlobMock.getMappedPort(BLOB_STORAGE_PORT), TEST_ACCOUNT_NAME)) + .credential(CREDENTIAL) + .buildClient(); + } + }; + } +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java index 925f1887d2..96aa2077c0 100644 --- a/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java @@ -16,47 +16,54 @@ */ package feast.serving.it; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; - +import com.google.auth.oauth2.AccessToken; +import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.storage.*; -import com.google.cloud.storage.testing.RemoteStorageHelper; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; import feast.proto.core.RegistryProto; import 
feast.serving.service.config.ApplicationProperties; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.junit.jupiter.Container; public class ServingRedisGSRegistryIT extends ServingBaseTests { - static Storage storage = - RemoteStorageHelper.create() - .getOptions() - .toBuilder() - .setProjectId(System.getProperty("GCP_PROJECT", "kf-feast")) - .build() - .getService(); - static final String bucket = RemoteStorageHelper.generateBucketName(); + private static final String TEST_PROJECT = "test-project"; + private static final String TEST_BUCKET = "test-bucket"; + private static final BlobId blobId = BlobId.of(TEST_BUCKET, "registry.db");; + private static final int GCS_PORT = 4443; - static void putToStorage(BlobId blobId, RegistryProto.Registry registry) { - storage.create(BlobInfo.newBuilder(blobId).build(), registry.toByteArray()); + @Container + static final GenericContainer gcsMock = + new GenericContainer<>("fsouza/fake-gcs-server") + .withExposedPorts(GCS_PORT) + .withCreateContainerCmdModifier( + cmd -> cmd.withEntrypoint("/bin/fake-gcs-server", "-scheme", "http")); - assertArrayEquals(storage.get(blobId).getContent(), registry.toByteArray()); - } + public static final AccessToken credential = new AccessToken("test-token", null); - static BlobId blobId; + static void putToStorage(RegistryProto.Registry registry) { + Storage gcsClient = createClient(); + + gcsClient.create(BlobInfo.newBuilder(blobId).build(), registry.toByteArray()); + } @BeforeAll static void setUp() { - storage.create(BucketInfo.of(bucket)); - blobId = BlobId.of(bucket, "registry.db"); + Storage gcsClient = createClient(); + gcsClient.create(BucketInfo.of(TEST_BUCKET)); - putToStorage(blobId, registryProto); + putToStorage(registryProto); } - @AfterAll - static void tearDown() throws 
ExecutionException, InterruptedException { - RemoteStorageHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS); + private static Storage createClient() { + return StorageOptions.newBuilder() + .setProjectId(TEST_PROJECT) + .setCredentials(ServiceAccountCredentials.create(credential)) + .setHost("http://localhost:" + gcsMock.getMappedPort(GCS_PORT)) + .build() + .getService(); } @Override @@ -71,6 +78,21 @@ ApplicationProperties.FeastProperties createFeastProperties() { @Override void updateRegistryFile(RegistryProto.Registry registry) { - putToStorage(blobId, registry); + putToStorage(registry); + } + + @Override + AbstractModule registryConfig() { + return new AbstractModule() { + @Provides + Storage googleStorage(ApplicationProperties applicationProperties) { + return StorageOptions.newBuilder() + .setProjectId(TEST_PROJECT) + .setCredentials(ServiceAccountCredentials.create(credential)) + .setHost("http://localhost:" + gcsMock.getMappedPort(GCS_PORT)) + .build() + .getService(); + } + }; } } diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java index 12315c9e48..52e1af9065 100644 --- a/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java @@ -17,6 +17,8 @@ package feast.serving.it; import com.adobe.testing.s3mock.testcontainers.S3MockContainer; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; @@ -30,13 +32,18 @@ import org.testcontainers.junit.jupiter.Container; public class ServingRedisS3RegistryIT extends ServingBaseTests { + private static final String TEST_REGION = "us-east-1"; + private static final String TEST_BUCKET = "test-bucket"; 
@Container static final S3MockContainer s3Mock = new S3MockContainer("2.2.3"); + private static final AWSStaticCredentialsProvider credentials = + new AWSStaticCredentialsProvider(new BasicAWSCredentials("anyAccessKey", "anySecretKey")); private static AmazonS3 createClient() { return AmazonS3ClientBuilder.standard() .withEndpointConfiguration( new AwsClientBuilder.EndpointConfiguration( - String.format("http://localhost:%d", s3Mock.getHttpServerPort()), "us-east-1")) + String.format("http://localhost:%d", s3Mock.getHttpServerPort()), TEST_REGION)) + .withCredentials(credentials) .enablePathStyleAccess() .build(); } @@ -48,13 +55,13 @@ private static void putToStorage(RegistryProto.Registry proto) { metadata.setContentType("application/protobuf"); AmazonS3 s3Client = createClient(); - s3Client.putObject("test-bucket", "registry.db", new ByteArrayInputStream(bytes), metadata); + s3Client.putObject(TEST_BUCKET, "registry.db", new ByteArrayInputStream(bytes), metadata); } @BeforeAll static void setUp() { AmazonS3 s3Client = createClient(); - s3Client.createBucket("test-bucket"); + s3Client.createBucket(TEST_BUCKET); putToStorage(registryProto); } @@ -64,7 +71,7 @@ ApplicationProperties.FeastProperties createFeastProperties() { final ApplicationProperties.FeastProperties feastProperties = TestUtils.createBasicFeastProperties( environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379)); - feastProperties.setRegistry("s3://test-bucket/registry.db"); + feastProperties.setRegistry(String.format("s3://%s/registry.db", TEST_BUCKET)); return feastProperties; } @@ -82,7 +89,8 @@ public AmazonS3 awsStorage() { return AmazonS3ClientBuilder.standard() .withEndpointConfiguration( new AwsClientBuilder.EndpointConfiguration( - String.format("http://localhost:%d", s3Mock.getHttpServerPort()), "us-east-1")) + String.format("http://localhost:%d", s3Mock.getHttpServerPort()), TEST_REGION)) + .withCredentials(credentials) .enablePathStyleAccess() .build(); } 
From 5579f5cbba076185f66967655030005328984248 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 10:50:29 +0000 Subject: [PATCH 097/122] chore: Bump express from 4.18.1 to 4.19.2 in /sdk/python/feast/ui (#4047) Bumps [express](https://github.com/expressjs/express) from 4.18.1 to 4.19.2. - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/master/History.md) - [Commits](https://github.com/expressjs/express/compare/4.18.1...4.19.2) --- updated-dependencies: - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/yarn.lock | 55 +++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 06f4d3f12b..48cbd30803 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -3511,21 +3511,21 @@ bluebird@^3.5.5: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -body-parser@1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== +body-parser@1.20.2: + version "1.20.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" + integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== dependencies: bytes "3.1.2" - content-type "~1.0.4" + content-type "~1.0.5" debug "2.6.9" depd "2.0.0" destroy "1.2.0" 
http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" + qs "6.11.0" + raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -3966,6 +3966,11 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +content-type@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: version "1.8.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" @@ -3978,10 +3983,10 @@ cookie-signature@1.0.6: resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== +cookie@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== core-js-compat@^3.21.0, core-js-compat@^3.22.1: version "3.22.5" @@ -5266,16 +5271,16 @@ expect@^27.5.1: jest-message-util "^27.5.1" express@^4.17.3: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity 
sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + version "4.19.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" + integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.0" + body-parser "1.20.2" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.5.0" + cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" @@ -5291,7 +5296,7 @@ express@^4.17.3: parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.7" - qs "6.10.3" + qs "6.11.0" range-parser "~1.2.1" safe-buffer "5.2.1" send "0.18.0" @@ -8591,10 +8596,10 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== +qs@6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== dependencies: side-channel "^1.0.4" @@ -8647,10 +8652,10 @@ range-parser@^1.2.1, range-parser@~1.2.1: resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== 
+raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== dependencies: bytes "3.1.2" http-errors "2.0.0" From d82d1ecb534ab35b901c36e920666196eae0ac79 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 30 Mar 2024 23:14:10 +0400 Subject: [PATCH 098/122] feat: Add local tests for s3 registry using minio (#4029) --- Makefile | 2 +- .../integration/registration/test_registry.py | 60 ++++++++++++++++++- 2 files changed, 58 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 55896b8c11..d232d9c93f 100644 --- a/Makefile +++ b/Makefile @@ -67,7 +67,7 @@ test-python-unit: python -m pytest -n 8 --color=yes sdk/python/tests test-python-integration: - python -m pytest -n 8 --integration --color=yes --durations=5 --timeout=1200 --timeout_method=thread sdk/python/tests + python -m pytest -n 8 --integration -k "not minio_registry" --color=yes --durations=5 --timeout=1200 --timeout_method=thread sdk/python/tests test-python-integration-local: @(docker info > /dev/null 2>&1 && \ diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 57e625e66b..3bc2b3fb39 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -14,9 +14,11 @@ import os import time from datetime import timedelta +from unittest import mock import pytest from pytest_lazyfixture import lazy_fixture +from testcontainers.core.container import DockerContainer from feast import FileSource from feast.data_format import ParquetFormat @@ -60,12 +62,56 @@ def s3_registry() -> Registry: return Registry("project", registry_config, None) +@pytest.fixture +def minio_registry() -> Registry: + minio_user = "minio99" + minio_password = "minio123" 
+ bucket_name = "test-bucket" + + container: DockerContainer = ( + DockerContainer("quay.io/minio/minio") + .with_exposed_ports(9000, 9001) + .with_env("MINIO_ROOT_USER", minio_user) + .with_env("MINIO_ROOT_PASSWORD", minio_password) + .with_command('server /data --console-address ":9001"') + .with_exposed_ports() + ) + + container.start() + + exposed_port = container.get_exposed_port("9000") + container_host = container.get_container_host_ip() + + container.exec(f"mkdir /data/{bucket_name}") + + registry_config = RegistryConfig( + path=f"s3://{bucket_name}/registry.db", cache_ttl_seconds=600 + ) + + mock_environ = { + "FEAST_S3_ENDPOINT_URL": f"http://{container_host}:{exposed_port}", + "AWS_ACCESS_KEY_ID": minio_user, + "AWS_SECRET_ACCESS_KEY": minio_password, + "AWS_SESSION_TOKEN": "", + } + + with mock.patch.dict(os.environ, mock_environ): + yield Registry("project", registry_config, None) + + container.stop() + + @pytest.mark.integration @pytest.mark.parametrize( "test_registry", - [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + [ + lazy_fixture("gcs_registry"), + lazy_fixture("s3_registry"), + lazy_fixture("minio_registry"), + ], ) def test_apply_entity_integration(test_registry): + entity = Entity( name="driver_car_id", description="Car driver id", @@ -106,7 +152,11 @@ def test_apply_entity_integration(test_registry): @pytest.mark.integration @pytest.mark.parametrize( "test_registry", - [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + [ + lazy_fixture("gcs_registry"), + lazy_fixture("s3_registry"), + lazy_fixture("minio_registry"), + ], ) def test_apply_feature_view_integration(test_registry): # Create Feature Views @@ -183,7 +233,11 @@ def test_apply_feature_view_integration(test_registry): @pytest.mark.integration @pytest.mark.parametrize( "test_registry", - [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + [ + lazy_fixture("gcs_registry"), + lazy_fixture("s3_registry"), + lazy_fixture("minio_registry"), + ], ) def 
test_apply_data_source_integration(test_registry: Registry): validate_registry_data_source_apply(test_registry) From 26391b07605794bcb0eb6cdec6d59bd94720bba6 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 2 Apr 2024 18:34:01 +0400 Subject: [PATCH 099/122] fix: Substrait ODFVs for online (#4064) * fix substrait odfvs for online, add tests Signed-off-by: tokoko * fix formatting Signed-off-by: tokoko * change odfv substrait test dates relative to start_date and end_date Signed-off-by: tokoko * force tests rerun Signed-off-by: tokoko --------- Signed-off-by: tokoko --- sdk/python/feast/feature_store.py | 9 ++-- .../infra/offline_stores/offline_store.py | 2 +- sdk/python/feast/on_demand_feature_view.py | 4 +- ...on.py => test_substrait_transformation.py} | 49 +++++++++++++------ 4 files changed, 43 insertions(+), 21 deletions(-) rename sdk/python/tests/unit/{test_on_demand_substrait_transformation.py => test_substrait_transformation.py} (73%) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index bfb8a59b2b..83aaafd686 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -2037,11 +2037,10 @@ def _augment_response_with_on_demand_transforms( proto_values = [] for selected_feature in selected_subset: - if odfv.mode in ["python", "pandas"]: - feature_vector = transformed_features[selected_feature] - proto_values.append( - python_values_to_proto_values(feature_vector, ValueType.UNKNOWN) - ) + feature_vector = transformed_features[selected_feature] + proto_values.append( + python_values_to_proto_values(feature_vector, ValueType.UNKNOWN) + ) odfv_result_names |= set(selected_subset) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index aaed78dd45..6c16ef2643 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -128,7 +128,7 @@ def to_arrow( 
features_df = self._to_df_internal(timeout=timeout) if self.on_demand_feature_views: for odfv in self.on_demand_feature_views: - if odfv.mode != "pandas": + if odfv.mode not in {"pandas", "substrait"}: raise Exception( f'OnDemandFeatureView mode "{odfv.mode}" not supported for offline processing.' ) diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 8d51edbe58..f83500cbc9 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -465,7 +465,9 @@ def get_transformed_features( return self.get_transformed_features_dict( feature_dict=features, ) - elif self.mode == "pandas" and isinstance(features, pd.DataFrame): + elif self.mode in {"pandas", "substrait"} and isinstance( + features, pd.DataFrame + ): return self.get_transformed_features_df( df_with_features=features, full_feature_names=full_feature_names, diff --git a/sdk/python/tests/unit/test_on_demand_substrait_transformation.py b/sdk/python/tests/unit/test_substrait_transformation.py similarity index 73% rename from sdk/python/tests/unit/test_on_demand_substrait_transformation.py rename to sdk/python/tests/unit/test_substrait_transformation.py index 378aa7ce3b..28ab68c70b 100644 --- a/sdk/python/tests/unit/test_on_demand_substrait_transformation.py +++ b/sdk/python/tests/unit/test_substrait_transformation.py @@ -60,6 +60,7 @@ def test_ibis_pandas_parity(): @on_demand_feature_view( sources=[driver_stats_fv], schema=[Field(name="conv_rate_plus_acc", dtype=Float64)], + mode="pandas", ) def pandas_view(inputs: pd.DataFrame) -> pd.DataFrame: df = pd.DataFrame() @@ -84,30 +85,50 @@ def substrait_view(inputs: Table) -> Table: [driver, driver_stats_source, driver_stats_fv, substrait_view, pandas_view] ) + store.materialize( + start_date=start_date, + end_date=end_date, + ) + entity_df = pd.DataFrame.from_dict( { # entity's join key -> entity values "driver_id": [1001, 1002, 1003], # "event_timestamp" (reserved key) -> 
timestamps "event_timestamp": [ - datetime(2021, 4, 12, 10, 59, 42), - datetime(2021, 4, 12, 8, 12, 10), - datetime(2021, 4, 12, 16, 40, 26), + start_date + timedelta(days=4), + start_date + timedelta(days=5), + start_date + timedelta(days=6), ], } ) + requested_features = [ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "substrait_view:conv_rate_plus_acc_substrait", + "pandas_view:conv_rate_plus_acc", + ] + training_df = store.get_historical_features( - entity_df=entity_df, - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "substrait_view:conv_rate_plus_acc_substrait", - "pandas_view:conv_rate_plus_acc", - ], - ).to_df() + entity_df=entity_df, features=requested_features + ) + + assert training_df.to_df()["conv_rate_plus_acc"].equals( + training_df.to_df()["conv_rate_plus_acc_substrait"] + ) + + assert training_df.to_arrow()["conv_rate_plus_acc"].equals( + training_df.to_arrow()["conv_rate_plus_acc_substrait"] + ) + + online_response = store.get_online_features( + features=requested_features, + entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}, {"driver_id": 1003}], + ) - assert training_df["conv_rate_plus_acc"].equals( - training_df["conv_rate_plus_acc_substrait"] + assert ( + online_response.to_dict()["conv_rate_plus_acc"] + == online_response.to_dict()["conv_rate_plus_acc_substrait"] ) From e4bef6769265a9b5d87486e34ac00f022ca9ce28 Mon Sep 17 00:00:00 2001 From: Alex Vinnik <33845028+alex-vinnik-sp@users.noreply.github.com> Date: Tue, 2 Apr 2024 10:11:22 -0500 Subject: [PATCH 100/122] fix: Dump repo_config by alias (#4063) dump-repo-config-alias Dump repo_config by alias Signed-off-by: Alex Vinnik --- .../contrib/bytewax/bytewax_materialization_engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py 
b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py index 060a47ce58..d82e0920e2 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py @@ -309,7 +309,7 @@ def _create_kubernetes_job(self, job_id, paths, feature_view): def _create_configuration_map(self, job_id, paths, feature_view, namespace): """Create a Kubernetes configmap for this job""" - feature_store_configuration = yaml.dump(self.repo_config.dict()) + feature_store_configuration = yaml.dump(self.repo_config.dict(by_alias=True)) materialization_config = yaml.dump( {"paths": paths, "feature_view": feature_view.name} From 54910a16253c3f901d3bd5399bc2ba9703a7254d Mon Sep 17 00:00:00 2001 From: Alex Vinnik <33845028+alex-vinnik-sp@users.noreply.github.com> Date: Tue, 2 Apr 2024 18:42:35 -0500 Subject: [PATCH 101/122] fix: Add missing init files in infra utils (#4067) infa-utils-init fix: Add mising init files in infra utils Signed-off-by: Alex Vinnik --- sdk/python/feast/infra/utils/snowflake/__init__.py | 0 sdk/python/feast/infra/utils/snowflake/registry/__init__.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdk/python/feast/infra/utils/snowflake/__init__.py create mode 100644 sdk/python/feast/infra/utils/snowflake/registry/__init__.py diff --git a/sdk/python/feast/infra/utils/snowflake/__init__.py b/sdk/python/feast/infra/utils/snowflake/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/utils/snowflake/registry/__init__.py b/sdk/python/feast/infra/utils/snowflake/registry/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From f09c612d046dfa56e9c616ff68c05823ce0f3bb6 Mon Sep 17 00:00:00 2001 From: lokeshrangineni Date: Wed, 3 Apr 2024 08:30:04 -0400 Subject: [PATCH 102/122] feat: Dropping python 3.8 requirements files from the project. 
(#4021) * Dropping the support for python 3.8 version from feast Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * updating the pyproject.toml to use the python 3.9 version. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * Dropping the support for python 3.8 but these are required to merge the PR as the PR build needs these files because it runs based on the master files. We will be deleting these files once the PR is merged. Signed-off-by: Lokesh Rangineni * Adding missed file. dropping the support for python 3.8. Dropping the support for python 3.8 but these are required to merge the PR as the PR build needs these files because it runs based on the master files. We will be deleting these files once the PR is merged. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * Trying to fix the integration test failures with drop python 3.8 version PR. Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni * Removing the last dependencies of python 3.8 version from repo. 
Signed-off-by: Lokesh Rangineni Signed-off-by: Lokesh Rangineni --------- Signed-off-by: Lokesh Rangineni --- .../requirements/py3.8-ci-requirements.txt | 1053 ----------------- .../requirements/py3.8-requirements.txt | 214 ---- 2 files changed, 1267 deletions(-) delete mode 100644 sdk/python/requirements/py3.8-ci-requirements.txt delete mode 100644 sdk/python/requirements/py3.8-requirements.txt diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt deleted file mode 100644 index 3d3dbe764b..0000000000 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ /dev/null @@ -1,1053 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt -# -alabaster==0.7.13 - # via sphinx -altair==4.2.2 - # via great-expectations -annotated-types==0.6.0 - # via pydantic -anyio==4.3.0 - # via - # httpx - # jupyter-server - # starlette - # watchfiles -appdirs==1.4.4 - # via fissix -argon2-cffi==23.1.0 - # via jupyter-server -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -arrow==1.3.0 - # via isoduration -asn1crypto==1.5.1 - # via snowflake-connector-python -assertpy==1.1 - # via feast (setup.py) -asttokens==2.4.1 - # via stack-data -async-lru==2.0.4 - # via jupyterlab -async-timeout==4.0.3 - # via redis -atpublic==3.1.2 - # via ibis-framework -attrs==23.2.0 - # via - # bowler - # jsonschema - # referencing -avro==1.11.3 - # via feast (setup.py) -azure-core==1.30.1 - # via - # azure-identity - # azure-storage-blob -azure-identity==1.15.0 - # via feast (setup.py) -azure-storage-blob==12.19.1 - # via feast (setup.py) -babel==2.14.0 - # via - # jupyterlab-server - # sphinx -backcall==0.2.0 - # via ipython -backports-zoneinfo==0.2.1 - # via - # trino - # tzlocal -beautifulsoup4==4.12.3 - # via nbconvert -black==22.12.0 - # via feast (setup.py) -bleach==6.1.0 - # via nbconvert 
-boto3==1.34.60 - # via - # feast (setup.py) - # moto -botocore==1.34.60 - # via - # boto3 - # moto - # s3transfer -bowler==0.9.0 - # via feast (setup.py) -build==1.1.1 - # via - # feast (setup.py) - # pip-tools -bytewax==0.15.1 - # via feast (setup.py) -cachecontrol==0.14.0 - # via firebase-admin -cachetools==5.3.3 - # via google-auth -cassandra-driver==3.29.0 - # via feast (setup.py) -certifi==2024.2.2 - # via - # httpcore - # httpx - # kubernetes - # minio - # requests - # snowflake-connector-python -cffi==1.16.0 - # via - # argon2-cffi-bindings - # cryptography - # snowflake-connector-python -cfgv==3.4.0 - # via pre-commit -charset-normalizer==3.3.2 - # via - # requests - # snowflake-connector-python -click==8.1.7 - # via - # black - # bowler - # dask - # feast (setup.py) - # geomet - # great-expectations - # moreorless - # pip-tools - # uvicorn -cloudpickle==3.0.0 - # via dask -colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations -comm==0.2.2 - # via - # ipykernel - # ipywidgets -coverage[toml]==7.4.3 - # via pytest-cov -cryptography==42.0.5 - # via - # azure-identity - # azure-storage-blob - # feast (setup.py) - # great-expectations - # moto - # msal - # pyjwt - # pyopenssl - # snowflake-connector-python - # types-pyopenssl - # types-redis -dask==2023.5.0 - # via feast (setup.py) -db-dtypes==1.2.0 - # via google-cloud-bigquery -debugpy==1.8.1 - # via ipykernel -decorator==5.1.1 - # via ipython -defusedxml==0.7.1 - # via nbconvert -deprecation==2.1.0 - # via testcontainers -dill==0.3.8 - # via - # bytewax - # feast (setup.py) - # multiprocess -distlib==0.3.8 - # via virtualenv -docker==7.0.0 - # via - # feast (setup.py) - # testcontainers -docutils==0.19 - # via sphinx -entrypoints==0.4 - # via altair -exceptiongroup==1.2.0 - # via - # anyio - # pytest -execnet==2.0.2 - # via pytest-xdist -executing==2.0.1 - # via stack-data -fastapi==0.110.0 - # via feast (setup.py) -fastjsonschema==2.19.1 - # via nbformat -filelock==3.13.1 - # via - # 
snowflake-connector-python - # virtualenv -firebase-admin==5.4.0 - # via feast (setup.py) -fissix==21.11.13 - # via bowler -flake8==6.0.0 - # via feast (setup.py) -fqdn==1.5.1 - # via jsonschema -fsspec==2023.12.2 - # via - # dask - # feast (setup.py) -geojson==2.5.0 - # via rockset -geomet==0.2.1.post1 - # via cassandra-driver -google-api-core[grpc]==2.17.1 - # via - # feast (setup.py) - # firebase-admin - # google-api-python-client - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-core - # google-cloud-datastore - # google-cloud-firestore - # google-cloud-storage -google-api-python-client==2.122.0 - # via firebase-admin -google-auth==2.28.2 - # via - # google-api-core - # google-api-python-client - # google-auth-httplib2 - # google-cloud-core - # google-cloud-storage - # kubernetes -google-auth-httplib2==0.2.0 - # via google-api-python-client -google-cloud-bigquery[pandas]==3.12.0 - # via feast (setup.py) -google-cloud-bigquery-storage==2.24.0 - # via feast (setup.py) -google-cloud-bigtable==2.23.0 - # via feast (setup.py) -google-cloud-core==2.4.1 - # via - # google-cloud-bigquery - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-firestore - # google-cloud-storage -google-cloud-datastore==2.19.0 - # via feast (setup.py) -google-cloud-firestore==2.15.0 - # via firebase-admin -google-cloud-storage==2.15.0 - # via - # feast (setup.py) - # firebase-admin -google-crc32c==1.5.0 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.0 - # via - # google-cloud-bigquery - # google-cloud-storage -googleapis-common-protos[grpc]==1.63.0 - # via - # feast (setup.py) - # google-api-core - # grpc-google-iam-v1 - # grpcio-status -great-expectations==0.18.10 - # via feast (setup.py) -greenlet==3.0.3 - # via sqlalchemy -grpc-google-iam-v1==0.13.0 - # via google-cloud-bigtable -grpcio==1.62.1 - # via - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # 
googleapis-common-protos - # grpc-google-iam-v1 - # grpcio-health-checking - # grpcio-reflection - # grpcio-status - # grpcio-testing - # grpcio-tools -grpcio-health-checking==1.62.1 - # via feast (setup.py) -grpcio-reflection==1.62.1 - # via feast (setup.py) -grpcio-status==1.62.1 - # via google-api-core -grpcio-testing==1.62.1 - # via feast (setup.py) -grpcio-tools==1.62.1 - # via feast (setup.py) -gunicorn==21.2.0 - # via feast (setup.py) -h11==0.14.0 - # via - # httpcore - # uvicorn -happybase==1.2.0 - # via feast (setup.py) -hazelcast-python-client==5.3.0 - # via feast (setup.py) -hiredis==2.3.2 - # via feast (setup.py) -httpcore==1.0.4 - # via httpx -httplib2==0.22.0 - # via - # google-api-python-client - # google-auth-httplib2 -httptools==0.6.1 - # via uvicorn -httpx==0.27.0 - # via - # feast (setup.py) - # jupyterlab -ibis-framework==4.1.0 - # via - # feast (setup.py) - # ibis-substrait -ibis-substrait==2.29.1 - # via feast (setup.py) -identify==2.5.35 - # via pre-commit -idna==3.6 - # via - # anyio - # httpx - # jsonschema - # requests - # snowflake-connector-python -imagesize==1.4.1 - # via sphinx -importlib-metadata==6.11.0 - # via - # build - # dask - # feast (setup.py) - # jupyter-client - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # nbconvert - # sphinx - # typeguard -importlib-resources==6.1.3 - # via - # feast (setup.py) - # jsonschema - # jsonschema-specifications - # jupyterlab -iniconfig==2.0.0 - # via pytest -ipykernel==6.29.3 - # via jupyterlab -ipython==8.12.3 - # via - # great-expectations - # ipykernel - # ipywidgets -ipywidgets==8.1.2 - # via great-expectations -isodate==0.6.1 - # via azure-storage-blob -isoduration==20.11.0 - # via jsonschema -isort==5.13.2 - # via feast (setup.py) -jedi==0.19.1 - # via ipython -jinja2==3.1.3 - # via - # altair - # feast (setup.py) - # great-expectations - # jupyter-server - # jupyterlab - # jupyterlab-server - # moto - # nbconvert - # sphinx -jmespath==1.0.1 - # via - # boto3 - # botocore 
-json5==0.9.22 - # via jupyterlab-server -jsonpatch==1.33 - # via great-expectations -jsonpointer==2.4 - # via - # jsonpatch - # jsonschema -jsonschema[format-nongpl]==4.21.1 - # via - # altair - # feast (setup.py) - # great-expectations - # jupyter-events - # jupyterlab-server - # nbformat -jsonschema-specifications==2023.12.1 - # via jsonschema -jupyter-client==8.6.1 - # via - # ipykernel - # jupyter-server - # nbclient -jupyter-core==5.7.2 - # via - # ipykernel - # jupyter-client - # jupyter-server - # jupyterlab - # nbclient - # nbconvert - # nbformat -jupyter-events==0.9.1 - # via jupyter-server -jupyter-lsp==2.2.4 - # via jupyterlab -jupyter-server==2.13.0 - # via - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # notebook - # notebook-shim -jupyter-server-terminals==0.5.3 - # via jupyter-server -jupyterlab==4.1.4 - # via notebook -jupyterlab-pygments==0.3.0 - # via nbconvert -jupyterlab-server==2.25.4 - # via - # jupyterlab - # notebook -jupyterlab-widgets==3.0.10 - # via ipywidgets -kubernetes==20.13.0 - # via feast (setup.py) -locket==1.0.0 - # via partd -makefun==1.15.2 - # via great-expectations -markdown-it-py==3.0.0 - # via rich -markupsafe==2.1.5 - # via - # jinja2 - # nbconvert - # werkzeug -marshmallow==3.21.1 - # via great-expectations -matplotlib-inline==0.1.6 - # via - # ipykernel - # ipython -mccabe==0.7.0 - # via flake8 -mdurl==0.1.2 - # via markdown-it-py -minio==7.1.0 - # via feast (setup.py) -mistune==3.0.2 - # via - # great-expectations - # nbconvert -mmh3==4.1.0 - # via feast (setup.py) -mock==2.0.0 - # via feast (setup.py) -moreorless==0.4.0 - # via bowler -moto==4.2.14 - # via feast (setup.py) -msal==1.27.0 - # via - # azure-identity - # msal-extensions -msal-extensions==1.1.0 - # via azure-identity -msgpack==1.0.8 - # via cachecontrol -multipledispatch==0.6.0 - # via ibis-framework -multiprocess==0.70.16 - # via bytewax -mypy==1.9.0 - # via - # feast (setup.py) - # sqlalchemy -mypy-extensions==1.0.0 - # via - # black - # mypy 
-mypy-protobuf==3.3.0 - # via feast (setup.py) -nbclient==0.9.1 - # via nbconvert -nbconvert==7.16.2 - # via jupyter-server -nbformat==5.10.2 - # via - # great-expectations - # jupyter-server - # nbclient - # nbconvert -nest-asyncio==1.6.0 - # via ipykernel -nodeenv==1.8.0 - # via pre-commit -notebook==7.1.1 - # via great-expectations -notebook-shim==0.2.4 - # via - # jupyterlab - # notebook -numpy==1.24.4 - # via - # altair - # db-dtypes - # feast (setup.py) - # great-expectations - # ibis-framework - # pandas - # pyarrow - # scipy -oauthlib==3.2.2 - # via requests-oauthlib -overrides==7.7.0 - # via jupyter-server -packaging==24.0 - # via - # build - # dask - # db-dtypes - # deprecation - # docker - # google-cloud-bigquery - # great-expectations - # gunicorn - # ibis-substrait - # ipykernel - # jupyter-server - # jupyterlab - # jupyterlab-server - # marshmallow - # msal-extensions - # nbconvert - # pytest - # snowflake-connector-python - # sphinx -pandas==1.5.3 ; python_version < "3.9" - # via - # altair - # db-dtypes - # feast (setup.py) - # google-cloud-bigquery - # great-expectations - # ibis-framework - # snowflake-connector-python -pandocfilters==1.5.1 - # via nbconvert -parso==0.8.3 - # via jedi -parsy==2.1 - # via ibis-framework -partd==1.4.1 - # via dask -pathspec==0.12.1 - # via black -pbr==6.0.0 - # via mock -pexpect==4.9.0 - # via ipython -pickleshare==0.7.5 - # via ipython -pip-tools==7.4.1 - # via feast (setup.py) -pkgutil-resolve-name==1.3.10 - # via jsonschema -platformdirs==3.11.0 - # via - # black - # jupyter-core - # snowflake-connector-python - # virtualenv -pluggy==1.4.0 - # via pytest -ply==3.11 - # via thriftpy2 -portalocker==2.8.2 - # via msal-extensions -pre-commit==3.3.1 - # via feast (setup.py) -prometheus-client==0.20.0 - # via jupyter-server -prompt-toolkit==3.0.43 - # via ipython -proto-plus==1.23.0 - # via - # feast (setup.py) - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # 
google-cloud-datastore - # google-cloud-firestore -protobuf==4.25.3 - # via - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-firestore - # googleapis-common-protos - # grpc-google-iam-v1 - # grpcio-health-checking - # grpcio-reflection - # grpcio-status - # grpcio-testing - # grpcio-tools - # ibis-substrait - # mypy-protobuf - # proto-plus -psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg2-binary==2.9.9 - # via feast (setup.py) -ptyprocess==0.7.0 - # via - # pexpect - # terminado -pure-eval==0.2.2 - # via stack-data -py==1.11.0 - # via feast (setup.py) -py-cpuinfo==9.0.0 - # via pytest-benchmark -py4j==0.10.9.7 - # via pyspark -pyarrow==15.0.1 - # via - # db-dtypes - # feast (setup.py) - # google-cloud-bigquery - # snowflake-connector-python -pyasn1==0.5.1 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.3.0 - # via google-auth -pybindgen==0.22.1 - # via feast (setup.py) -pycodestyle==2.10.0 - # via flake8 -pycparser==2.21 - # via cffi -pydantic==2.6.4 - # via - # fastapi - # feast (setup.py) - # great-expectations -pydantic-core==2.16.3 - # via pydantic -pyflakes==3.0.1 - # via flake8 -pygments==2.17.2 - # via - # feast (setup.py) - # ipython - # nbconvert - # rich - # sphinx -pyjwt[crypto]==2.8.0 - # via - # msal - # snowflake-connector-python -pymssql==2.2.11 - # via feast (setup.py) -pymysql==1.1.0 - # via feast (setup.py) -pyodbc==5.1.0 - # via feast (setup.py) -pyopenssl==24.1.0 - # via snowflake-connector-python -pyparsing==3.1.2 - # via - # great-expectations - # httplib2 -pyproject-hooks==1.0.0 - # via - # build - # pip-tools -pyspark==3.5.1 - # via feast (setup.py) -pytest==7.4.4 - # via - # feast (setup.py) - # pytest-benchmark - # pytest-cov - # pytest-lazy-fixture - # pytest-mock - # pytest-ordering - # pytest-timeout - # pytest-xdist -pytest-benchmark==3.4.1 - # via feast (setup.py) -pytest-cov==4.1.0 - # via 
feast (setup.py) -pytest-lazy-fixture==0.6.3 - # via feast (setup.py) -pytest-mock==1.10.4 - # via feast (setup.py) -pytest-ordering==0.6 - # via feast (setup.py) -pytest-timeout==1.4.2 - # via feast (setup.py) -pytest-xdist==3.5.0 - # via feast (setup.py) -python-dateutil==2.9.0.post0 - # via - # arrow - # botocore - # google-cloud-bigquery - # great-expectations - # ibis-framework - # jupyter-client - # kubernetes - # moto - # pandas - # rockset - # trino -python-dotenv==1.0.1 - # via uvicorn -python-json-logger==2.0.7 - # via jupyter-events -pytz==2024.1 - # via - # babel - # great-expectations - # ibis-framework - # pandas - # snowflake-connector-python - # trino -pyyaml==6.0.1 - # via - # dask - # feast (setup.py) - # ibis-substrait - # jupyter-events - # kubernetes - # pre-commit - # responses - # uvicorn -pyzmq==25.1.2 - # via - # ipykernel - # jupyter-client - # jupyter-server -redis==4.6.0 - # via feast (setup.py) -referencing==0.33.0 - # via - # jsonschema - # jsonschema-specifications - # jupyter-events -regex==2023.12.25 - # via feast (setup.py) -requests==2.31.0 - # via - # azure-core - # cachecontrol - # docker - # feast (setup.py) - # google-api-core - # google-cloud-bigquery - # google-cloud-storage - # great-expectations - # jupyterlab-server - # kubernetes - # moto - # msal - # requests-oauthlib - # responses - # snowflake-connector-python - # sphinx - # trino -requests-oauthlib==1.4.0 - # via kubernetes -responses==0.25.0 - # via moto -rfc3339-validator==0.1.4 - # via - # jsonschema - # jupyter-events -rfc3986-validator==0.1.1 - # via - # jsonschema - # jupyter-events -rich==13.7.1 - # via ibis-framework -rockset==2.1.1 - # via feast (setup.py) -rpds-py==0.18.0 - # via - # jsonschema - # referencing -rsa==4.9 - # via google-auth -ruamel-yaml==0.17.17 - # via great-expectations -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -s3transfer==0.10.0 - # via boto3 -scipy==1.10.1 - # via great-expectations -send2trash==1.8.2 - # via jupyter-server 
-six==1.16.0 - # via - # asttokens - # azure-core - # bleach - # geomet - # happybase - # isodate - # kubernetes - # mock - # multipledispatch - # python-dateutil - # rfc3339-validator - # thriftpy2 -sniffio==1.3.1 - # via - # anyio - # httpx -snowballstemmer==2.2.0 - # via sphinx -snowflake-connector-python[pandas]==3.7.1 - # via feast (setup.py) -sortedcontainers==2.4.0 - # via snowflake-connector-python -soupsieve==2.5 - # via beautifulsoup4 -sphinx==6.2.1 - # via feast (setup.py) -sphinxcontrib-applehelp==1.0.4 - # via sphinx -sphinxcontrib-devhelp==1.0.2 - # via sphinx -sphinxcontrib-htmlhelp==2.0.1 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.3 - # via sphinx -sphinxcontrib-serializinghtml==1.1.5 - # via sphinx -sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a38 - # via sqlalchemy -sqlglot==10.6.4 - # via ibis-framework -stack-data==0.6.3 - # via ipython -starlette==0.36.3 - # via fastapi -tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.2.3 - # via feast (setup.py) -terminado==0.18.1 - # via - # jupyter-server - # jupyter-server-terminals -testcontainers==3.7.1 - # via feast (setup.py) -thriftpy2==0.4.20 - # via happybase -tinycss2==1.2.1 - # via nbconvert -toml==0.10.2 - # via feast (setup.py) -tomli==2.0.1 - # via - # black - # build - # coverage - # jupyterlab - # mypy - # pip-tools - # pyproject-hooks - # pytest -tomlkit==0.12.4 - # via snowflake-connector-python -toolz==0.12.1 - # via - # altair - # dask - # ibis-framework - # partd -tornado==6.4 - # via - # ipykernel - # jupyter-client - # jupyter-server - # jupyterlab - # notebook - # terminado -tqdm==4.66.2 - # via - # feast (setup.py) - # great-expectations -traitlets==5.14.2 - # via - # comm - # ipykernel - # ipython - # ipywidgets - # jupyter-client - # jupyter-core - # jupyter-events - # jupyter-server - # jupyterlab - # matplotlib-inline - # nbclient - # nbconvert - # nbformat -trino==0.328.0 - # via feast (setup.py) 
-typeguard==4.1.5 - # via feast (setup.py) -types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf -types-pymysql==1.1.0.1 - # via feast (setup.py) -types-pyopenssl==24.0.0.20240311 - # via types-redis -types-python-dateutil==2.8.19.20240311 - # via - # arrow - # feast (setup.py) -types-pytz==2024.1.0.20240203 - # via feast (setup.py) -types-pyyaml==6.0.12.20240311 - # via feast (setup.py) -types-redis==4.6.0.20240311 - # via feast (setup.py) -types-requests==2.30.0.0 - # via feast (setup.py) -types-setuptools==69.1.0.20240310 - # via feast (setup.py) -types-tabulate==0.9.0.20240106 - # via feast (setup.py) -types-urllib3==1.26.25.14 - # via types-requests -typing-extensions==4.10.0 - # via - # annotated-types - # anyio - # async-lru - # azure-core - # azure-storage-blob - # black - # fastapi - # great-expectations - # ibis-framework - # ipython - # mypy - # pydantic - # pydantic-core - # rich - # snowflake-connector-python - # sqlalchemy2-stubs - # starlette - # typeguard - # uvicorn -tzlocal==5.2 - # via - # great-expectations - # trino -uri-template==1.3.0 - # via jsonschema -uritemplate==4.1.1 - # via google-api-python-client -urllib3==1.26.18 - # via - # botocore - # docker - # feast (setup.py) - # great-expectations - # kubernetes - # minio - # requests - # responses - # rockset - # snowflake-connector-python -uvicorn[standard]==0.28.0 - # via feast (setup.py) -uvloop==0.19.0 - # via uvicorn -virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit -volatile==2.1.0 - # via bowler -watchfiles==0.21.0 - # via uvicorn -wcwidth==0.2.13 - # via prompt-toolkit -webcolors==1.13 - # via jsonschema -webencodings==0.5.1 - # via - # bleach - # tinycss2 -websocket-client==1.7.0 - # via - # jupyter-server - # kubernetes -websockets==12.0 - # via uvicorn -werkzeug==3.0.1 - # via moto -wheel==0.43.0 - # via pip-tools -widgetsnbextension==4.0.10 - # via ipywidgets -wrapt==1.16.0 - # via testcontainers -xmltodict==0.13.0 - # via moto -zipp==3.17.0 - # 
via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt deleted file mode 100644 index e689c011c5..0000000000 --- a/sdk/python/requirements/py3.8-requirements.txt +++ /dev/null @@ -1,214 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt -# -annotated-types==0.6.0 - # via pydantic -anyio==4.3.0 - # via - # httpx - # starlette - # watchfiles -appdirs==1.4.4 - # via fissix -attrs==23.2.0 - # via - # bowler - # jsonschema - # referencing -bowler==0.9.0 - # via feast (setup.py) -certifi==2024.2.2 - # via - # httpcore - # httpx - # requests -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # bowler - # dask - # feast (setup.py) - # moreorless - # uvicorn -cloudpickle==3.0.0 - # via dask -colorama==0.4.6 - # via feast (setup.py) -dask==2023.2.1 - # via feast (setup.py) -dill==0.3.8 - # via feast (setup.py) -exceptiongroup==1.2.0 - # via anyio -fastapi==0.110.0 - # via feast (setup.py) -fissix==21.11.13 - # via bowler -fsspec==2024.2.0 - # via dask -greenlet==3.0.3 - # via sqlalchemy -gunicorn==21.2.0 - # via feast (setup.py) -h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.4 - # via httpx -httptools==0.6.1 - # via uvicorn -httpx==0.27.0 - # via feast (setup.py) -idna==3.6 - # via - # anyio - # httpx - # requests -importlib-metadata==6.11.0 - # via - # dask - # feast (setup.py) - # typeguard -importlib-resources==6.1.3 - # via - # feast (setup.py) - # jsonschema - # jsonschema-specifications -jinja2==3.1.3 - # via feast (setup.py) -jsonschema==4.21.1 - # via feast (setup.py) -jsonschema-specifications==2023.12.1 - # via jsonschema -locket==1.0.0 - # via partd -markupsafe==2.1.5 - # via jinja2 -mmh3==4.1.0 - # via 
feast (setup.py) -moreorless==0.4.0 - # via bowler -mypy==1.9.0 - # via sqlalchemy -mypy-extensions==1.0.0 - # via mypy -mypy-protobuf==3.5.0 - # via feast (setup.py) -numpy==1.24.4 - # via - # feast (setup.py) - # pandas - # pyarrow -packaging==24.0 - # via - # dask - # gunicorn -pandas==2.0.3 - # via feast (setup.py) -partd==1.4.1 - # via dask -pkgutil-resolve-name==1.3.10 - # via jsonschema -proto-plus==1.23.0 - # via feast (setup.py) -protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf - # proto-plus -pyarrow==15.0.1 - # via feast (setup.py) -pydantic==2.6.4 - # via - # fastapi - # feast (setup.py) -pydantic-core==2.16.3 - # via pydantic -pygments==2.17.2 - # via feast (setup.py) -python-dateutil==2.9.0.post0 - # via pandas -python-dotenv==1.0.1 - # via uvicorn -pytz==2024.1 - # via pandas -pyyaml==6.0.1 - # via - # dask - # feast (setup.py) - # uvicorn -referencing==0.33.0 - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 - # via feast (setup.py) -rpds-py==0.18.0 - # via - # jsonschema - # referencing -six==1.16.0 - # via python-dateutil -sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==1.4.52 - # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a38 - # via sqlalchemy -starlette==0.36.3 - # via fastapi -tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.2.3 - # via feast (setup.py) -toml==0.10.2 - # via feast (setup.py) -tomli==2.0.1 - # via mypy -toolz==0.12.1 - # via - # dask - # partd -tqdm==4.66.2 - # via feast (setup.py) -typeguard==4.1.5 - # via feast (setup.py) -types-protobuf==4.24.0.20240311 - # via mypy-protobuf -typing-extensions==4.10.0 - # via - # annotated-types - # anyio - # fastapi - # mypy - # pydantic - # pydantic-core - # sqlalchemy2-stubs - # starlette - # typeguard - # uvicorn -tzdata==2024.1 - # via pandas -urllib3==2.2.1 - # via requests -uvicorn[standard]==0.28.0 - # via feast (setup.py) -uvloop==0.19.0 - # via uvicorn -volatile==2.1.0 - # via bowler -watchfiles==0.21.0 - # via uvicorn 
-websockets==12.0 - # via uvicorn -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources From 584e9b1be9452158d9104133a24ff29d3976f9ed Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Wed, 3 Apr 2024 19:20:34 +0400 Subject: [PATCH 103/122] fix: Run feature server w/o gunicorn on windows (#4024) * fix: Run feature server w/o gunicorn on windows Signed-off-by: tokoko * skip gunicorn for windows in setup.py Signed-off-by: tokoko * pull default registry ttl to constants Signed-off-by: tokoko * remove default value from start_server as well Signed-off-by: tokoko --------- Signed-off-by: tokoko --- sdk/python/feast/constants.py | 3 ++ sdk/python/feast/feature_server.py | 65 ++++++++++++++++++------------ setup.py | 2 +- 3 files changed, 43 insertions(+), 27 deletions(-) diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py index c022ecba55..e47da0ad6b 100644 --- a/sdk/python/feast/constants.py +++ b/sdk/python/feast/constants.py @@ -49,3 +49,6 @@ # Environment variable for feature server docker image tag DOCKER_IMAGE_TAG_ENV_NAME: str = "FEAST_SERVER_DOCKER_IMAGE_TAG" + +# Default feature server registry ttl (seconds) +DEFAULT_FEATURE_SERVER_REGISTRY_TTL = 5 diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 618aefb2f2..4b0e50a06d 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -1,10 +1,10 @@ import json +import sys import threading import traceback import warnings from typing import List, Optional -import gunicorn.app.base import pandas as pd from dateutil import parser from fastapi import FastAPI, HTTPException, Request, Response, status @@ -15,6 +15,7 @@ import feast from feast import proto_json, utils +from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL from feast.data_source import PushMode from feast.errors import PushSourceNotFoundException from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest @@ 
-45,7 +46,10 @@ class MaterializeIncrementalRequest(BaseModel): feature_views: Optional[List[str]] = None -def get_app(store: "feast.FeatureStore", registry_ttl_sec: int = 5): +def get_app( + store: "feast.FeatureStore", + registry_ttl_sec: int = DEFAULT_FEATURE_SERVER_REGISTRY_TTL, +): proto_json.patch() app = FastAPI() @@ -202,24 +206,27 @@ def materialize_incremental(body=Depends(get_body)): return app -class FeastServeApplication(gunicorn.app.base.BaseApplication): - def __init__(self, store: "feast.FeatureStore", **options): - self._app = get_app( - store=store, - registry_ttl_sec=options.get("registry_ttl_sec", 5), - ) - self._options = options - super().__init__() +if sys.platform != "win32": + import gunicorn.app.base - def load_config(self): - for key, value in self._options.items(): - if key.lower() in self.cfg.settings and value is not None: - self.cfg.set(key.lower(), value) + class FeastServeApplication(gunicorn.app.base.BaseApplication): + def __init__(self, store: "feast.FeatureStore", **options): + self._app = get_app( + store=store, + registry_ttl_sec=options["registry_ttl_sec"], + ) + self._options = options + super().__init__() + + def load_config(self): + for key, value in self._options.items(): + if key.lower() in self.cfg.settings and value is not None: + self.cfg.set(key.lower(), value) - self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker") + self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker") - def load(self): - return self._app + def load(self): + return self._app def start_server( @@ -229,13 +236,19 @@ def start_server( no_access_log: bool, workers: int, keep_alive_timeout: int, - registry_ttl_sec: int = 5, + registry_ttl_sec: int, ): - FeastServeApplication( - store=store, - bind=f"{host}:{port}", - accesslog=None if no_access_log else "-", - workers=workers, - keepalive=keep_alive_timeout, - registry_ttl_sec=registry_ttl_sec, - ).run() + if sys.platform != "win32": + FeastServeApplication( + store=store, + 
bind=f"{host}:{port}", + accesslog=None if no_access_log else "-", + workers=workers, + keepalive=keep_alive_timeout, + registry_ttl_sec=registry_ttl_sec, + ).run() + else: + import uvicorn + + app = get_app(store, registry_ttl_sec) + uvicorn.run(app, host=host, port=port, access_log=(not no_access_log)) diff --git a/setup.py b/setup.py index 2d7bf63778..ef3ba1d784 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ "typeguard>=4.0.0", "fastapi>=0.68.0", "uvicorn[standard]>=0.14.0,<1", - "gunicorn", + "gunicorn; platform_system != 'Windows'", # https://github.com/dask/dask/issues/10996 "dask>=2021.1.0,<2024.3.0", "bowler", # Needed for automatic repo upgrades From 7f1557b348b7935e3586c90c8dec15fdf6cd8665 Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Wed, 3 Apr 2024 20:19:24 +0200 Subject: [PATCH 104/122] feat: Lint with ruff (#4043) * feat: Lin with ruff Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * fea: replace black with ruff format options Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * fea: files reformatted due to ruff deviations from black Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * fix: restored mypy in lint-python target Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * fix: fixed formatting error Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * adding ruff format --check Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * Formatting updated files Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * Adding section separator Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> --------- Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> --- Makefile | 13 ++---- docs/project/development-guide.md | 4 +- pyproject.toml | 44 
+++++++++---------- sdk/python/feast/__init__.py | 3 +- sdk/python/feast/data_source.py | 1 - sdk/python/feast/diff/registry_diff.py | 12 ++--- sdk/python/feast/dqm/profilers/profiler.py | 12 ++--- .../embedded_go/online_features_service.py | 1 - sdk/python/feast/feature_logging.py | 18 ++++---- sdk/python/feast/feature_store.py | 28 +++++++----- .../infra/materialization/aws_lambda/app.py | 1 - .../batch_materialization_engine.py | 15 +++---- .../bytewax/bytewax_materialization_engine.py | 3 +- .../spark/spark_materialization_engine.py | 2 - .../infra/materialization/snowflake_engine.py | 8 ++-- .../feast/infra/offline_stores/bigquery.py | 6 +-- .../contrib/athena_offline_store/athena.py | 3 -- .../athena_offline_store/athena_source.py | 1 - .../athena_offline_store/tests/data_source.py | 2 - .../contrib/ibis_offline_store/ibis.py | 6 +-- .../postgres_offline_store/postgres.py | 10 +++-- .../postgres_offline_store/postgres_source.py | 1 - .../contrib/spark_offline_store/spark.py | 2 - .../trino_offline_store/connectors/upload.py | 1 + .../trino_offline_store/tests/data_source.py | 1 - .../trino_offline_store/trino_source.py | 1 - sdk/python/feast/infra/offline_stores/file.py | 31 +++++++------ .../infra/offline_stores/redshift_source.py | 6 +-- .../feast/infra/offline_stores/snowflake.py | 9 ---- .../infra/offline_stores/snowflake_source.py | 1 - .../hazelcast_online_store.py | 2 +- .../contrib/hbase_online_store/hbase.py | 6 +-- .../contrib/mysql_online_store/mysql.py | 3 -- .../feast/infra/online_stores/datastore.py | 2 - .../feast/infra/online_stores/sqlite.py | 7 ++- .../feast/infra/registry/base_registry.py | 19 ++++---- .../infra/utils/snowflake/snowflake_utils.py | 2 - sdk/python/feast/on_demand_feature_view.py | 8 ++-- sdk/python/feast/repo_operations.py | 1 - .../athena/feature_repo/test_workflow.py | 2 - .../feast/templates/snowflake/bootstrap.py | 1 - sdk/python/feast/utils.py | 6 +-- sdk/python/pyproject.toml | 15 +++++++ 
.../requirements/py3.10-ci-requirements.txt | 25 +++-------- .../requirements/py3.9-ci-requirements.txt | 26 +++-------- sdk/python/setup.cfg | 22 ---------- .../feature_repos/repo_configuration.py | 6 +-- .../universal/data_sources/bigquery.py | 2 - .../universal/data_sources/file.py | 2 - .../universal/data_sources/redshift.py | 2 - .../universal/data_sources/snowflake.py | 2 - .../universal/online_store/hazelcast.py | 1 - .../contrib/spark/test_spark.py | 1 - .../integration/registration/test_registry.py | 1 - .../registration/test_universal_types.py | 2 +- sdk/python/tests/unit/cli/test_cli.py | 2 - .../offline_stores/test_offline_store.py | 1 - .../infra/scaffolding/test_repo_config.py | 1 - .../unit/infra/test_inference_unit_tests.py | 2 +- .../online_store/test_online_retrieval.py | 2 +- sdk/python/tests/unit/test_type_map.py | 1 - sdk/python/tests/utils/e2e_test_validation.py | 1 - sdk/python/tests/utils/feature_records.py | 3 +- setup.cfg | 23 ---------- setup.py | 4 +- 65 files changed, 162 insertions(+), 290 deletions(-) create mode 100644 sdk/python/pyproject.toml delete mode 100644 sdk/python/setup.cfg diff --git a/Makefile b/Makefile index d232d9c93f..26e79cf6a9 100644 --- a/Makefile +++ b/Makefile @@ -294,18 +294,13 @@ test-python-universal: python -m pytest -n 8 --integration sdk/python/tests format-python: - # Sort - cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ - - # Format - cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests + cd ${ROOT_DIR}/sdk/python; python -m ruff check --fix feast/ tests/ + cd ${ROOT_DIR}/sdk/python; python -m ruff format feast/ tests/ lint-python: cd ${ROOT_DIR}/sdk/python; python -m mypy feast - cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only - cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/ - cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests - + cd ${ROOT_DIR}/sdk/python; python -m ruff check feast/ tests/ + cd ${ROOT_DIR}/sdk/python; python 
-m ruff format --check feast/ tests # Java install-java-ci-dependencies: diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index 43dae1d678..28baa789bb 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -168,8 +168,8 @@ docker build -t docker-whale -f ./sdk/python/feast/infra/feature_servers/multicl Feast Python SDK / CLI codebase: - Conforms to [Black code style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html) - Has type annotations as enforced by `mypy` -- Has imports sorted by `isort` -- Is lintable by `flake8` +- Has imports sorted by `ruff` (see [isort (I) rules](https://docs.astral.sh/ruff/rules/#isort-i)) +- Is lintable by `ruff` To ensure your Python code conforms to Feast Python code standards: - Autoformat your code to conform to the code style: diff --git a/pyproject.toml b/pyproject.toml index bfe2bc9fd0..00170ab443 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,27 +5,25 @@ build-backend = "setuptools.build_meta" [tool.setuptools_scm] # Including this section is comparable to supplying use_scm_version=True in setup.py. 
-[tool.black] +[tool.ruff] line-length = 88 -target-version = ['py39'] -include = '\.pyi?$' -exclude = ''' -( - /( - \.eggs # exclude a few common directories in the - | \.git # root of the project - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist - | pb2.py - | \.pyi - | protos - | sdk/python/feast/embedded_go/lib - )/ -) -''' +target-version = "py39" +include = ["*.py", "*.pyi"] + +[tool.ruff.format] +# exclude a few common directories in the root of the project +exclude = [ + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".tox", + ".venv", + "_build", + "buck-out", + "build", + "dist", + "pb2.py", + ".pyi", + "protos", + "sdk/python/feast/embedded_go/lib"] diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index 3eff91d65f..f51eb2983c 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -2,7 +2,8 @@ from importlib.metadata import PackageNotFoundError from importlib.metadata import version as _version except ModuleNotFoundError: - from importlib_metadata import PackageNotFoundError, version as _version # type: ignore + from importlib_metadata import PackageNotFoundError # type: ignore + from importlib_metadata import version as _version from feast.infra.offline_stores.bigquery_source import BigQuerySource from feast.infra.offline_stores.contrib.athena_offline_store.athena_source import ( diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 3421fd5d30..17fbfd5fcf 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -576,7 +576,6 @@ def from_proto(data_source: DataSourceProto): ) def to_proto(self) -> DataSourceProto: - schema_pb = [] if isinstance(self.schema, Dict): diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 106d34bf48..b608757496 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -216,12 +216,12 @@ def 
extract_objects_for_keep_delete_update_add( objs_to_update = {} objs_to_add = {} - registry_object_type_to_objects: Dict[ - FeastObjectType, List[Any] - ] = FeastObjectType.get_objects_from_registry(registry, current_project) - registry_object_type_to_repo_contents: Dict[ - FeastObjectType, List[Any] - ] = FeastObjectType.get_objects_from_repo_contents(desired_repo_contents) + registry_object_type_to_objects: Dict[FeastObjectType, List[Any]] = ( + FeastObjectType.get_objects_from_registry(registry, current_project) + ) + registry_object_type_to_repo_contents: Dict[FeastObjectType, List[Any]] = ( + FeastObjectType.get_objects_from_repo_contents(desired_repo_contents) + ) for object_type in FEAST_OBJECT_TYPES: ( diff --git a/sdk/python/feast/dqm/profilers/profiler.py b/sdk/python/feast/dqm/profilers/profiler.py index 34496b0cca..03481bdc99 100644 --- a/sdk/python/feast/dqm/profilers/profiler.py +++ b/sdk/python/feast/dqm/profilers/profiler.py @@ -15,13 +15,11 @@ def validate(self, dataset: pd.DataFrame) -> "ValidationReport": ... @abc.abstractmethod - def to_proto(self): - ... + def to_proto(self): ... @classmethod @abc.abstractmethod - def from_proto(cls, proto) -> "Profile": - ... + def from_proto(cls, proto) -> "Profile": ... class Profiler: @@ -34,13 +32,11 @@ def analyze_dataset(self, dataset: pd.DataFrame) -> Profile: ... @abc.abstractmethod - def to_proto(self): - ... + def to_proto(self): ... @classmethod @abc.abstractmethod - def from_proto(cls, proto) -> "Profiler": - ... + def from_proto(cls, proto) -> "Profiler": ... 
class ValidationReport: diff --git a/sdk/python/feast/embedded_go/online_features_service.py b/sdk/python/feast/embedded_go/online_features_service.py index c6430b5f6d..56427f61e6 100644 --- a/sdk/python/feast/embedded_go/online_features_service.py +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -65,7 +65,6 @@ def get_online_features( request_data: Dict[str, Union[List[Any], Value_pb2.RepeatedValue]], full_feature_names: bool = False, ): - if feature_service: join_keys_types = self._service.GetEntityTypesMapByFeatureService( feature_service.name diff --git a/sdk/python/feast/feature_logging.py b/sdk/python/feast/feature_logging.py index bd45c09b0a..2843f87121 100644 --- a/sdk/python/feast/feature_logging.py +++ b/sdk/python/feast/feature_logging.py @@ -86,15 +86,15 @@ def get_schema(self, registry: "BaseRegistry") -> pa.Schema: fields[join_key] = FEAST_TYPE_TO_ARROW_TYPE[entity_column.dtype] for feature in projection.features: - fields[ - f"{projection.name_to_use()}__{feature.name}" - ] = FEAST_TYPE_TO_ARROW_TYPE[feature.dtype] - fields[ - f"{projection.name_to_use()}__{feature.name}__timestamp" - ] = PA_TIMESTAMP_TYPE - fields[ - f"{projection.name_to_use()}__{feature.name}__status" - ] = pa.int32() + fields[f"{projection.name_to_use()}__{feature.name}"] = ( + FEAST_TYPE_TO_ARROW_TYPE[feature.dtype] + ) + fields[f"{projection.name_to_use()}__{feature.name}__timestamp"] = ( + PA_TIMESTAMP_TYPE + ) + fields[f"{projection.name_to_use()}__{feature.name}__status"] = ( + pa.int32() + ) # system columns fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=UTC) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 83aaafd686..62ce9d6e38 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -818,7 +818,8 @@ def apply( views_to_update = [ ob for ob in objects - if ( + if + ( # BFVs are not handled separately from FVs right now. 
(isinstance(ob, FeatureView) or isinstance(ob, BatchFeatureView)) and not isinstance(ob, StreamFeatureView) @@ -950,7 +951,9 @@ def apply( validation_references.name, project=self.project, commit=False ) - tables_to_delete: List[FeatureView] = views_to_delete + sfvs_to_delete if not partial else [] # type: ignore + tables_to_delete: List[FeatureView] = ( + views_to_delete + sfvs_to_delete if not partial else [] # type: ignore + ) tables_to_keep: List[FeatureView] = views_to_update + sfvs_to_update # type: ignore self._get_provider().update_infra( @@ -1575,7 +1578,10 @@ def _get_online_features( num_rows = _validate_entity_values(entity_proto_values) _validate_feature_refs(_feature_refs, full_feature_names) - (grouped_refs, grouped_odfv_refs,) = _group_feature_refs( + ( + grouped_refs, + grouped_odfv_refs, + ) = _group_feature_refs( _feature_refs, requested_feature_views, requested_on_demand_feature_views, @@ -1728,9 +1734,9 @@ def _get_entity_maps( ) entity_name_to_join_key_map[entity_name] = join_key for entity_column in feature_view.entity_columns: - entity_type_map[ - entity_column.name - ] = entity_column.dtype.to_value_type() + entity_type_map[entity_column.name] = ( + entity_column.dtype.to_value_type() + ) return ( entity_name_to_join_key_map, @@ -2005,11 +2011,11 @@ def _augment_response_with_on_demand_transforms( if odfv.mode == "python": if initial_response_dict is None: initial_response_dict = initial_response.to_dict() - transformed_features_dict: Dict[ - str, List[Any] - ] = odfv.get_transformed_features( - initial_response_dict, - full_feature_names, + transformed_features_dict: Dict[str, List[Any]] = ( + odfv.get_transformed_features( + initial_response_dict, + full_feature_names, + ) ) elif odfv.mode in {"pandas", "substrait"}: if initial_response_df is None: diff --git a/sdk/python/feast/infra/materialization/aws_lambda/app.py b/sdk/python/feast/infra/materialization/aws_lambda/app.py index 375674adaa..2bf65542e5 100644 --- 
a/sdk/python/feast/infra/materialization/aws_lambda/app.py +++ b/sdk/python/feast/infra/materialization/aws_lambda/app.py @@ -23,7 +23,6 @@ def handler(event, context): print("Received event: " + json.dumps(event, indent=2), flush=True) try: - config_base64 = event[FEATURE_STORE_YAML_ENV_NAME] config_bytes = base64.b64decode(config_base64) diff --git a/sdk/python/feast/infra/materialization/batch_materialization_engine.py b/sdk/python/feast/infra/materialization/batch_materialization_engine.py index 41ab9f22d4..8e854a508d 100644 --- a/sdk/python/feast/infra/materialization/batch_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/batch_materialization_engine.py @@ -49,24 +49,19 @@ class MaterializationJob(ABC): task: MaterializationTask @abstractmethod - def status(self) -> MaterializationJobStatus: - ... + def status(self) -> MaterializationJobStatus: ... @abstractmethod - def error(self) -> Optional[BaseException]: - ... + def error(self) -> Optional[BaseException]: ... @abstractmethod - def should_be_retried(self) -> bool: - ... + def should_be_retried(self) -> bool: ... @abstractmethod - def job_id(self) -> str: - ... + def job_id(self) -> str: ... @abstractmethod - def url(self) -> Optional[str]: - ... + def url(self) -> Optional[str]: ... 
class BatchMaterializationEngine(ABC): diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py index d82e0920e2..3ad6fe4b55 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py @@ -5,9 +5,8 @@ from typing import Callable, List, Literal, Sequence, Union import yaml -from kubernetes import client +from kubernetes import client, utils from kubernetes import config as k8s_config -from kubernetes import utils from kubernetes.client.exceptions import ApiException from kubernetes.utils import FailToCreateError from pydantic import StrictStr diff --git a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py index 798d3a8e6f..24608baebf 100644 --- a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py @@ -3,7 +3,6 @@ from typing import Callable, List, Literal, Optional, Sequence, Union, cast import dill -import pandas import pandas as pd import pyarrow from tqdm import tqdm @@ -201,7 +200,6 @@ class _SparkSerializedArtifacts: @classmethod def serialize(cls, feature_view, repo_config): - # serialize to proto feature_view_proto = feature_view.to_proto().SerializeToString() diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 28bec198a5..4a81982dcd 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -169,7 +169,6 @@ def teardown_infra( fvs: Sequence[Union[BatchFeatureView, StreamFeatureView, 
FeatureView]], entities: Sequence[Entity], ): - stage_path = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"."feast_{project}"' with GetSnowflakeConnection(self.repo_config.batch_engine) as conn: query = f"DROP STAGE IF EXISTS {stage_path}" @@ -230,8 +229,9 @@ def _materialize_one( project: str, tqdm_builder: Callable[[int], tqdm], ): - assert isinstance(feature_view, BatchFeatureView) or isinstance( - feature_view, FeatureView + assert ( + isinstance(feature_view, BatchFeatureView) + or isinstance(feature_view, FeatureView) ), "Snowflake can only materialize FeatureView & BatchFeatureView feature view types." entities = [] @@ -350,7 +350,6 @@ def generate_snowflake_materialization_query( feature_batch: list, project: str, ) -> str: - if feature_view.batch_source.created_timestamp_column: fv_created_str = f',"{feature_view.batch_source.created_timestamp_column}"' else: @@ -477,7 +476,6 @@ def materialize_to_external_online_store( feature_view: Union[StreamFeatureView, FeatureView], pbar: tqdm, ) -> None: - feature_names = [feature.name for feature in feature_view.features] with GetSnowflakeConnection(repo_config.batch_engine) as conn: diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 68420c0664..897647bfc2 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -95,9 +95,9 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): gcs_staging_location: Optional[str] = None """ (optional) GCS location used for offloading BigQuery results as parquet files.""" - table_create_disposition: Literal[ - "CREATE_NEVER", "CREATE_IF_NEEDED" - ] = "CREATE_IF_NEEDED" + table_create_disposition: Literal["CREATE_NEVER", "CREATE_IF_NEEDED"] = ( + "CREATE_IF_NEEDED" + ) """ (optional) Specifies whether the job is allowed to create new tables. The default value is CREATE_IF_NEEDED. 
Custom constraint for table_create_disposition. To understand more, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py index ae510171db..43960d87d5 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py @@ -205,7 +205,6 @@ def get_historical_features( @contextlib.contextmanager def query_generator() -> Iterator[str]: - table_name = offline_utils.get_temp_entity_table_name() _upload_entity_df(entity_df, athena_client, config, s3_resource, table_name) @@ -240,7 +239,6 @@ def query_generator() -> Iterator[str]: try: yield query finally: - # Always clean up the temp Athena table aws_utils.execute_athena_query( athena_client, @@ -423,7 +421,6 @@ def persist( @log_exceptions_and_usage def to_athena(self, table_name: str) -> None: - if self.on_demand_feature_views: transformed_df = self.to_df() diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py index 0aca42cd68..509d707935 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py @@ -307,7 +307,6 @@ def __init__( @staticmethod def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: - return SavedDatasetAthenaStorage( table_ref=AthenaOptions.from_proto(storage_proto.athena_storage).table ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py index 
6b2238830b..f01144afcc 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py @@ -22,7 +22,6 @@ class AthenaDataSourceCreator(DataSourceCreator): - tables: List[str] = [] def __init__(self, project_name: str, *args, **kwargs): @@ -53,7 +52,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - table_name = destination_name s3_target = ( self.offline_store_config.s3_staging_location diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py index 8787d70158..37fc6a4718 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py +++ b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py @@ -299,9 +299,9 @@ def __init__( ) -> None: super().__init__() self.table = table - self._on_demand_feature_views: List[ - OnDemandFeatureView - ] = on_demand_feature_views + self._on_demand_feature_views: List[OnDemandFeatureView] = ( + on_demand_feature_views + ) self._full_feature_names = full_feature_names self._metadata = metadata diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index 17a8e20f78..1bf10202e1 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -160,7 +160,7 @@ def query_generator() -> Iterator[str]: # Hack for query_context.entity_selections to support uppercase in columns for context in query_context_dict: context["entity_selections"] = [ - f'''"{entity_selection.replace(' AS ', '" AS "')}\"''' + f""""{entity_selection.replace(' AS ', '" AS 
"')}\"""" for entity_selection in context["entity_selections"] ] @@ -338,9 +338,11 @@ def _get_entity_df_event_timestamp_range( # If the entity_df is a string (SQL query), determine range # from table with _get_conn(config.offline_store) as conn, conn.cursor() as cur: - cur.execute( - f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max FROM ({entity_df}) as tmp_alias" - ), + ( + cur.execute( + f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max FROM ({entity_df}) as tmp_alias" + ), + ) res = cur.fetchone() entity_df_event_timestamp_range = (res[0], res[1]) else: diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py index bc535ed194..bbb3f768fd 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py @@ -117,7 +117,6 @@ def get_table_column_names_and_types( ) def get_table_query_string(self) -> str: - if self._postgres_options._table: return f"{self._postgres_options._table}" else: diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index b1b1c04c7d..43902f33cf 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -389,7 +389,6 @@ def supports_remote_storage_export(self) -> bool: def to_remote_storage(self) -> List[str]: """Currently only works for local and s3-based staging locations""" if self.supports_remote_storage_export(): - sdf: pyspark.sql.DataFrame = self.to_spark_df() if self._config.offline_store.staging_location.startswith("/"): @@ -405,7 +404,6 @@ def 
to_remote_storage(self) -> List[str]: return _list_files_in_folder(output_uri) elif self._config.offline_store.staging_location.startswith("s3://"): - spark_compatible_s3_staging_location = ( self._config.offline_store.staging_location.replace( "s3://", "s3a://" diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py index 5967b7a863..9e2ea3708d 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py @@ -17,6 +17,7 @@ file_format: parquet ``` """ + from datetime import datetime from typing import Any, Dict, Iterator, Optional, Set diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index bd3f9def8f..0dee517eb3 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -46,7 +46,6 @@ def trino_container(): class TrinoSourceCreator(DataSourceCreator): - tables: List[str] = [] def __init__( diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py index e618e8664e..73d40d902e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py @@ -182,7 +182,6 @@ def trino_options(self, trino_options): @staticmethod def from_proto(data_source: DataSourceProto): - assert data_source.HasField("trino_options") return TrinoSource( diff --git a/sdk/python/feast/infra/offline_stores/file.py 
b/sdk/python/feast/infra/offline_stores/file.py index d922e98c14..1ae9a1558d 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -176,7 +176,6 @@ def get_historical_features( # Create lazy function that is only called from the RetrievalJob object def evaluate_historical_retrieval(): - # Create a copy of entity_df to prevent modifying the original entity_df_with_features = entity_df.copy() @@ -188,25 +187,31 @@ def evaluate_historical_retrieval(): or entity_df_event_timestamp_col_type.tz != pytz.UTC ): # Make sure all event timestamp fields are tz-aware. We default tz-naive fields to UTC - entity_df_with_features[ - entity_df_event_timestamp_col - ] = entity_df_with_features[entity_df_event_timestamp_col].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc) + entity_df_with_features[entity_df_event_timestamp_col] = ( + entity_df_with_features[ + entity_df_event_timestamp_col + ].apply( + lambda x: x + if x.tzinfo is not None + else x.replace(tzinfo=pytz.utc) + ) ) # Convert event timestamp column to datetime and normalize time zone to UTC # This is necessary to avoid issues with pd.merge_asof if isinstance(entity_df_with_features, dd.DataFrame): - entity_df_with_features[ - entity_df_event_timestamp_col - ] = dd.to_datetime( - entity_df_with_features[entity_df_event_timestamp_col], utc=True + entity_df_with_features[entity_df_event_timestamp_col] = ( + dd.to_datetime( + entity_df_with_features[entity_df_event_timestamp_col], + utc=True, + ) ) else: - entity_df_with_features[ - entity_df_event_timestamp_col - ] = pd.to_datetime( - entity_df_with_features[entity_df_event_timestamp_col], utc=True + entity_df_with_features[entity_df_event_timestamp_col] = ( + pd.to_datetime( + entity_df_with_features[entity_df_event_timestamp_col], + utc=True, + ) ) # Sort event timestamp values diff --git a/sdk/python/feast/infra/offline_stores/redshift_source.py 
b/sdk/python/feast/infra/offline_stores/redshift_source.py index 52ab50ba00..f8cd53b246 100644 --- a/sdk/python/feast/infra/offline_stores/redshift_source.py +++ b/sdk/python/feast/infra/offline_stores/redshift_source.py @@ -220,9 +220,9 @@ def get_table_column_names_and_types( if config.offline_store.cluster_id: # Provisioned cluster - paginator_kwargs[ - "ClusterIdentifier" - ] = config.offline_store.cluster_id + paginator_kwargs["ClusterIdentifier"] = ( + config.offline_store.cluster_id + ) paginator_kwargs["DbUser"] = config.offline_store.user elif config.offline_store.workgroup: # Redshift serverless diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index cfaca038e7..907e4d4483 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -286,7 +286,6 @@ def get_historical_features( @contextlib.contextmanager def query_generator() -> Iterator[str]: - table_name = offline_utils.get_temp_entity_table_name() _upload_entity_df(entity_df, snowflake_conn, config, table_name) @@ -412,7 +411,6 @@ def __init__( feature_views: Optional[List[FeatureView]] = None, metadata: Optional[RetrievalMetadata] = None, ): - if feature_views is None: feature_views = [] if not isinstance(query, str): @@ -507,7 +505,6 @@ def to_snowflake( return None def to_arrow_batches(self) -> Iterator[pyarrow.Table]: - table_name = "temp_arrow_batches_" + uuid.uuid4().hex self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True) @@ -520,7 +517,6 @@ def to_arrow_batches(self) -> Iterator[pyarrow.Table]: return arrow_batches def to_pandas_batches(self) -> Iterator[pd.DataFrame]: - table_name = "temp_pandas_batches_" + uuid.uuid4().hex self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True) @@ -624,13 +620,10 @@ def _get_entity_schema( snowflake_conn: SnowflakeConnection, config: RepoConfig, ) -> Dict[str, np.dtype]: - if 
isinstance(entity_df, pd.DataFrame): - return dict(zip(entity_df.columns, entity_df.dtypes)) else: - query = f"SELECT * FROM ({entity_df}) LIMIT 1" limited_entity_df = execute_snowflake_statement( snowflake_conn, query @@ -645,7 +638,6 @@ def _upload_entity_df( config: RepoConfig, table_name: str, ) -> None: - if isinstance(entity_df, pd.DataFrame): # Write the data from the DataFrame to the table # Known issues with following entity data types: BINARY @@ -669,7 +661,6 @@ def _upload_entity_df( def _fix_entity_selections_identifiers(query_context) -> list: - for i, qc in enumerate(query_context): for j, es in enumerate(qc.entity_selections): query_context[i].entity_selections[j] = f'"{es}"'.replace(" AS ", '" AS "') diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 9a2c6e09bc..c0b2417099 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -395,7 +395,6 @@ def __init__(self, table_ref: str): @staticmethod def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: - return SavedDatasetSnowflakeStorage( table_ref=SnowflakeOptions.from_proto(storage_proto.snowflake_storage).table ) diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py index 7ec803a69c..2537ecbf45 100644 --- a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py @@ -17,6 +17,7 @@ """ Hazelcast online store for Feast. 
""" + import base64 import threading from datetime import datetime, timezone @@ -200,7 +201,6 @@ def online_read( entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - online_store_config = config.online_store if not isinstance(online_store_config, HazelcastOnlineStoreConfig): raise HazelcastInvalidConfig( diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py index 4b2d8ae39c..d46b848c12 100644 --- a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py +++ b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py @@ -107,9 +107,9 @@ def online_write_batch( ) values_dict = {} for feature_name, val in values.items(): - values_dict[ - HbaseConstants.get_col_from_feature(feature_name) - ] = val.SerializeToString() + values_dict[HbaseConstants.get_col_from_feature(feature_name)] = ( + val.SerializeToString() + ) if isinstance(timestamp, datetime): values_dict[HbaseConstants.DEFAULT_EVENT_TS] = struct.pack( ">L", int(calendar.timegm(timestamp.timetuple())) diff --git a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py index cf07d5fef1..26916a9fcb 100644 --- a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py +++ b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py @@ -41,7 +41,6 @@ class MySQLOnlineStore(OnlineStore): _conn: Optional[Connection] = None def _get_conn(self, config: RepoConfig) -> Connection: - online_store_config = config.online_store assert isinstance(online_store_config, MySQLOnlineStoreConfig) @@ -65,7 +64,6 @@ def online_write_batch( ], progress: Optional[Callable[[int], Any]], ) -> None: - conn = self._get_conn(config) cur = conn.cursor() @@ -178,7 +176,6 @@ def update( 
# We don't create any special state for the entities in this implementation. for table in tables_to_keep: - table_name = _table_id(project, table) index_name = f"{table_name}_ek" cur.execute( diff --git a/sdk/python/feast/infra/online_stores/datastore.py b/sdk/python/feast/infra/online_stores/datastore.py index ae96e16c64..149354b472 100644 --- a/sdk/python/feast/infra/online_stores/datastore.py +++ b/sdk/python/feast/infra/online_stores/datastore.py @@ -169,7 +169,6 @@ def online_write_batch( ], progress: Optional[Callable[[int], Any]], ) -> None: - online_config = config.online_store assert isinstance(online_config, DatastoreOnlineStoreConfig) client = self._get_client(online_config) @@ -259,7 +258,6 @@ def online_read( entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - online_config = config.online_store assert isinstance(online_config, DatastoreOnlineStoreConfig) client = self._get_client(online_config) diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 4a6aa28889..745a9ed5a5 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -38,9 +38,9 @@ class SqliteOnlineStoreConfig(FeastConfigBaseModel): """Online store config for local (SQLite-based) store""" - type: Literal[ - "sqlite", "feast.infra.online_stores.sqlite.SqliteOnlineStore" - ] = "sqlite" + type: Literal["sqlite", "feast.infra.online_stores.sqlite.SqliteOnlineStore"] = ( + "sqlite" + ) """ Online store type selector""" path: StrictStr = "data/online.db" @@ -86,7 +86,6 @@ def online_write_batch( ], progress: Optional[Callable[[int], Any]], ) -> None: - conn = self._get_conn(config) project = config.project diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index c67164103e..ed1fc3ab87 100644 --- 
a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -489,7 +489,6 @@ def get_validation_reference( def list_validation_references( self, project: str, allow_cache: bool = False ) -> List[ValidationReference]: - """ Retrieve a list of validation references from the registry @@ -550,14 +549,12 @@ def apply_user_metadata( project: str, feature_view: BaseFeatureView, metadata_bytes: Optional[bytes], - ): - ... + ): ... @abstractmethod def get_user_metadata( self, project: str, feature_view: BaseFeatureView - ) -> Optional[bytes]: - ... + ) -> Optional[bytes]: ... @abstractmethod def proto(self) -> RegistryProto: @@ -642,9 +639,9 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: ): if "userDefinedFunction" not in odfv_dict["spec"]: odfv_dict["spec"]["userDefinedFunction"] = {} - odfv_dict["spec"]["userDefinedFunction"][ - "body" - ] = on_demand_feature_view.feature_transformation.udf_string + odfv_dict["spec"]["userDefinedFunction"]["body"] = ( + on_demand_feature_view.feature_transformation.udf_string + ) odfv_dict["spec"]["featureTransformation"]["userDefinedFunction"][ "body" ] = on_demand_feature_view.feature_transformation.udf_string @@ -669,9 +666,9 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: ): sfv_dict = self._message_to_sorted_dict(stream_feature_view.to_proto()) - sfv_dict["spec"]["userDefinedFunction"][ - "body" - ] = stream_feature_view.udf_string + sfv_dict["spec"]["userDefinedFunction"]["body"] = ( + stream_feature_view.udf_string + ) registry_dict["streamFeatureViews"].append(sfv_dict) for saved_dataset in sorted( diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index 8548e4dbd8..dd965c4bed 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -48,7 +48,6 @@ def __init__(self, config: Any, autocommit=True): 
self.autocommit = autocommit def __enter__(self): - assert self.config.type in [ "snowflake.registry", "snowflake.offline", @@ -512,7 +511,6 @@ def chunk_helper(lst: pd.DataFrame, n: int) -> Iterator[Tuple[int, pd.DataFrame] def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: - with open(key_path, "rb") as key: p_key = serialization.load_pem_private_key( key.read(), diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index f83500cbc9..8a61b11065 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -160,9 +160,9 @@ def __init__( # noqa: C901 elif isinstance(odfv_source, FeatureViewProjection): self.source_feature_view_projections[odfv_source.name] = odfv_source else: - self.source_feature_view_projections[ - odfv_source.name - ] = odfv_source.projection + self.source_feature_view_projections[odfv_source.name] = ( + odfv_source.projection + ) self.feature_transformation = feature_transformation @@ -430,7 +430,6 @@ def get_transformed_features_dict( self, feature_dict: Dict[str, Any], # type: ignore ) -> Dict[str, Any]: - # we need a mapping from full feature name to short and back to do a renaming # The simplest thing to do is to make the full reference, copy the columns with the short reference # and rerun @@ -669,7 +668,6 @@ def mainify(obj) -> None: obj.__module__ = "__main__" def decorator(user_function): - return_annotation = inspect.signature(user_function).return_annotation if ( return_annotation diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 000e000438..0b659b960c 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -201,7 +201,6 @@ def parse_repo(repo_root: Path) -> RepoContents: @log_exceptions_and_usage def plan(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): - os.chdir(repo_path) project, registry, repo, store = 
_prepare_registry_and_repo(repo_config, repo_path) diff --git a/sdk/python/feast/templates/athena/feature_repo/test_workflow.py b/sdk/python/feast/templates/athena/feature_repo/test_workflow.py index bf69a4bff0..8d6479da80 100644 --- a/sdk/python/feast/templates/athena/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/athena/feature_repo/test_workflow.py @@ -11,9 +11,7 @@ def test_end_to_end(): - try: - # Before running this test method # 1. Upload the driver_stats.parquet file to your S3 bucket. # (https://github.com/feast-dev/feast-custom-offline-store-demo/tree/main/feature_repo/data) diff --git a/sdk/python/feast/templates/snowflake/bootstrap.py b/sdk/python/feast/templates/snowflake/bootstrap.py index 01f4045fe7..2224dc5359 100644 --- a/sdk/python/feast/templates/snowflake/bootstrap.py +++ b/sdk/python/feast/templates/snowflake/bootstrap.py @@ -55,7 +55,6 @@ def bootstrap(): f'Should I upload example data to Snowflake (overwriting "{project_name}_feast_driver_hourly_stats" table)?', default=True, ): - snowflake_conn = snowflake.connector.connect( account=snowflake_deployment_url, user=snowflake_user, diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 70fbda964d..89a1a9ab41 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -70,9 +70,9 @@ def _get_requested_feature_views_to_features_dict( Set full_feature_names to True to have feature names prefixed by their feature view name.""" feature_views_to_feature_map: Dict["FeatureView", List[str]] = defaultdict(list) - on_demand_feature_views_to_feature_map: Dict[ - "OnDemandFeatureView", List[str] - ] = defaultdict(list) + on_demand_feature_views_to_feature_map: Dict["OnDemandFeatureView", List[str]] = ( + defaultdict(list) + ) for ref in feature_refs: ref_parts = ref.split(":") diff --git a/sdk/python/pyproject.toml b/sdk/python/pyproject.toml new file mode 100644 index 0000000000..10ad007fa9 --- /dev/null +++ b/sdk/python/pyproject.toml @@ -0,0 +1,15 @@ 
+[tool.ruff] +exclude = [".git","__pycache__","docs/conf.py","dist","feast/protos","feast/embedded_go/lib","feast/infra/utils/snowflake/snowpark/snowflake_udfs.py"] + +[tool.ruff.lint] +select = ["E","F","W","I"] +ignore = ["E203", "E266", "E501", "E721"] + +[tool.ruff.lint.isort] +known-first-party = ["feast", "feast", "feast_serving_server", "feast_core_server"] +default-section = "third-party" + +[tool.mypy] +files = ["feast","tests"] +ignore_missing_imports = true +exclude = ["feast/embedded_go/lib"] diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index ac1994da37..af7a87c11b 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -18,6 +18,8 @@ anyio==4.3.0 # watchfiles appdirs==1.4.4 # via fissix +appnope==0.1.4 + # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -57,8 +59,6 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework -black==22.12.0 - # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.69 @@ -105,7 +105,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # black # bowler # dask # feast (setup.py) @@ -193,8 +192,6 @@ firebase-admin==5.4.0 # via feast (setup.py) fissix==21.11.13 # via bowler -flake8==6.0.0 - # via feast (setup.py) fqdn==1.5.1 # via jsonschema fsspec==2023.12.2 @@ -354,8 +351,6 @@ isodate==0.6.1 # via azure-storage-blob isoduration==20.11.0 # via jsonschema -isort==5.13.2 - # via feast (setup.py) jedi==0.19.1 # via ipython jinja2==3.1.3 @@ -447,8 +442,6 @@ matplotlib-inline==0.1.6 # via # ipykernel # ipython -mccabe==0.7.0 - # via flake8 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 @@ -482,9 +475,7 @@ mypy==1.9.0 # feast (setup.py) # sqlalchemy mypy-extensions==1.0.0 - # via - # black - # mypy + # via mypy mypy-protobuf==3.3.0 # via feast (setup.py) nbclient==0.10.0 @@ -559,8 +550,6 @@ 
parsy==2.1 # via ibis-framework partd==1.4.1 # via dask -pathspec==0.12.1 - # via black pbr==6.0.0 # via mock pexpect==4.9.0 @@ -569,7 +558,6 @@ pip-tools==7.4.1 # via feast (setup.py) platformdirs==3.11.0 # via - # black # jupyter-core # snowflake-connector-python # virtualenv @@ -647,8 +635,6 @@ pyasn1-modules==0.3.0 # via google-auth pybindgen==0.22.1 # via feast (setup.py) -pycodestyle==2.10.0 - # via flake8 pycparser==2.21 # via cffi pydantic==2.6.4 @@ -658,8 +644,6 @@ pydantic==2.6.4 # great-expectations pydantic-core==2.16.3 # via pydantic -pyflakes==3.0.1 - # via flake8 pygments==2.17.2 # via # feast (setup.py) @@ -807,6 +791,8 @@ rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations +ruff==0.3.4 + # via feast (setup.py) s3transfer==0.10.1 # via boto3 scipy==1.12.0 @@ -889,7 +875,6 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via - # black # build # coverage # jupyterlab diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 367b5dc050..53c7fd1bce 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -18,6 +18,8 @@ anyio==4.3.0 # watchfiles appdirs==1.4.4 # via fissix +appnope==0.1.4 + # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -57,8 +59,6 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework -black==22.12.0 - # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.69 @@ -105,7 +105,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # black # bowler # dask # feast (setup.py) @@ -193,8 +192,6 @@ firebase-admin==5.4.0 # via feast (setup.py) fissix==21.11.13 # via bowler -flake8==6.0.0 - # via feast (setup.py) fqdn==1.5.1 # via jsonschema fsspec==2023.12.2 @@ -362,8 +359,6 @@ isodate==0.6.1 # via azure-storage-blob isoduration==20.11.0 # via jsonschema -isort==5.13.2 - # via feast (setup.py) 
jedi==0.19.1 # via ipython jinja2==3.1.3 @@ -455,8 +450,6 @@ matplotlib-inline==0.1.6 # via # ipykernel # ipython -mccabe==0.7.0 - # via flake8 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 @@ -490,9 +483,7 @@ mypy==1.9.0 # feast (setup.py) # sqlalchemy mypy-extensions==1.0.0 - # via - # black - # mypy + # via mypy mypy-protobuf==3.3.0 # via feast (setup.py) nbclient==0.10.0 @@ -567,8 +558,6 @@ parsy==2.1 # via ibis-framework partd==1.4.1 # via dask -pathspec==0.12.1 - # via black pbr==6.0.0 # via mock pexpect==4.9.0 @@ -577,7 +566,6 @@ pip-tools==7.4.1 # via feast (setup.py) platformdirs==3.11.0 # via - # black # jupyter-core # snowflake-connector-python # virtualenv @@ -655,8 +643,6 @@ pyasn1-modules==0.3.0 # via google-auth pybindgen==0.22.1 # via feast (setup.py) -pycodestyle==2.10.0 - # via flake8 pycparser==2.21 # via cffi pydantic==2.6.4 @@ -666,8 +652,6 @@ pydantic==2.6.4 # great-expectations pydantic-core==2.16.3 # via pydantic -pyflakes==3.0.1 - # via flake8 pygments==2.17.2 # via # feast (setup.py) @@ -817,6 +801,8 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml +ruff==0.3.3 + # via feast (setup.py) s3transfer==0.10.1 # via boto3 scipy==1.12.0 @@ -899,7 +885,6 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via - # black # build # coverage # jupyterlab @@ -979,7 +964,6 @@ typing-extensions==4.10.0 # async-lru # azure-core # azure-storage-blob - # black # fastapi # great-expectations # ibis-framework diff --git a/sdk/python/setup.cfg b/sdk/python/setup.cfg deleted file mode 100644 index d934249d69..0000000000 --- a/sdk/python/setup.cfg +++ /dev/null @@ -1,22 +0,0 @@ -[isort] -src_paths = feast,tests -multi_line_output=3 -include_trailing_comma=True -force_grid_wrap=0 -use_parentheses=True -line_length=88 -skip=feast/protos,feast/embedded_go/lib -known_first_party=feast,feast_serving_server,feast_core_server -default_section=THIRDPARTY - -[flake8] -ignore = E203, E266, E501, W503, C901 -max-line-length = 88 
-max-complexity = 20 -select = B,C,E,F,W,T4 -exclude = .git,__pycache__,docs/conf.py,dist,feast/protos,feast/embedded_go/lib,feast/infra/utils/snowflake/snowpark/snowflake_udfs.py - -[mypy] -files=feast,tests -ignore_missing_imports=true -exclude=feast/embedded_go/lib diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index f745bafa13..d2450bf868 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -440,9 +440,9 @@ def construct_test_environment( aws_registry_path = os.getenv( "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" ) - registry: Union[ - str, RegistryConfig - ] = f"{aws_registry_path}/{project}/registry.db" + registry: Union[str, RegistryConfig] = ( + f"{aws_registry_path}/{project}/registry.db" + ) else: registry = RegistryConfig( path=str(Path(repo_dir_name) / "registry.db"), diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 066497a0bc..4fcd9533e8 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -42,7 +42,6 @@ def create_dataset(self): self.client.update_dataset(self.dataset, ["default_table_expiration_ms"]) def teardown(self): - for table in self.tables: self.client.delete_table(table, not_found_ok=True) @@ -68,7 +67,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - destination_name = self.get_prefixed_table_name(destination_name) self.create_dataset() diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py 
index 008bb8d881..c70dae9863 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -43,7 +43,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - destination_name = self.get_prefixed_table_name(destination_name) f = tempfile.NamedTemporaryFile( @@ -98,7 +97,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - destination_name = self.get_prefixed_table_name(destination_name) dataset_path = tempfile.TemporaryDirectory( diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index 5a4e3f1085..60fb8950a9 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -20,7 +20,6 @@ class RedshiftDataSourceCreator(DataSourceCreator): - tables: List[str] = [] def __init__(self, project_name: str, *args, **kwargs): @@ -54,7 +53,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - destination_name = self.get_prefixed_table_name(destination_name) aws_utils.upload_df_to_redshift( diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index 1481b11a10..237be2ac01 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -24,7 +24,6 @@ class SnowflakeDataSourceCreator(DataSourceCreator): - tables: List[str] = [] def __init__(self, project_name: str, *args, 
**kwargs): @@ -53,7 +52,6 @@ def create_data_source( field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = "ts", ) -> DataSource: - destination_name = self.get_prefixed_table_name(destination_name) with GetSnowflakeConnection(self.offline_store_config) as conn: diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py index 65d74135ae..d50f2b75a3 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py @@ -12,7 +12,6 @@ class HazelcastOnlineStoreCreator(OnlineStoreCreator): - cluster_name: str = "" container: DockerContainer = None diff --git a/sdk/python/tests/integration/materialization/contrib/spark/test_spark.py b/sdk/python/tests/integration/materialization/contrib/spark/test_spark.py index c7028a09ef..e85c1d7311 100644 --- a/sdk/python/tests/integration/materialization/contrib/spark/test_spark.py +++ b/sdk/python/tests/integration/materialization/contrib/spark/test_spark.py @@ -57,7 +57,6 @@ def test_spark_materialization_consistency(): ) try: - fs.apply([driver, driver_stats_fv]) print(df) diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 3bc2b3fb39..232f035609 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -111,7 +111,6 @@ def minio_registry() -> Registry: ], ) def test_apply_entity_integration(test_registry): - entity = Entity( name="driver_car_id", description="Car driver id", diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py index 7c24589c6f..3ce5876bd6 100644 --- 
a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -144,7 +144,7 @@ def test_feature_get_online_features_types_match( fs.materialize( environment.start_date, environment.end_date - - timedelta(hours=1) # throwing out last record to make sure + - timedelta(hours=1), # throwing out last record to make sure # we can successfully infer type even from all empty values ) diff --git a/sdk/python/tests/unit/cli/test_cli.py b/sdk/python/tests/unit/cli/test_cli.py index d15e1d1616..a286c847dd 100644 --- a/sdk/python/tests/unit/cli/test_cli.py +++ b/sdk/python/tests/unit/cli/test_cli.py @@ -105,7 +105,6 @@ def test_3rd_party_registry_store_with_fs_yaml_override_by_env_var() -> None: @contextmanager def setup_third_party_provider_repo(provider_name: str): with tempfile.TemporaryDirectory() as repo_dir_name: - # Construct an example repo in a temporary dir repo_path = Path(repo_dir_name) @@ -141,7 +140,6 @@ def setup_third_party_registry_store_repo( registry_store: str, fs_yaml_file_name: str = "feature_store.yaml" ): with tempfile.TemporaryDirectory() as repo_dir_name: - # Construct an example repo in a temporary dir repo_path = Path(repo_dir_name) diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index f93237fce5..9d8c4a7ec1 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -167,7 +167,6 @@ def retrieval_job(request, environment): full_feature_names=False, ) elif request.param is MsSqlServerRetrievalJob: - return MsSqlServerRetrievalJob( query="query", engine=MagicMock(), diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index ca4ed6472b..e1839fbd8b 100644 --- 
a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -12,7 +12,6 @@ def _test_config(config_text, expect_error: Optional[str]): Try loading a repo config and check raised error against a regex. """ with tempfile.TemporaryDirectory() as repo_dir_name: - repo_path = Path(repo_dir_name) repo_config = repo_path / "feature_store.yaml" diff --git a/sdk/python/tests/unit/infra/test_inference_unit_tests.py b/sdk/python/tests/unit/infra/test_inference_unit_tests.py index be97a838bd..e4acef9713 100644 --- a/sdk/python/tests/unit/infra/test_inference_unit_tests.py +++ b/sdk/python/tests/unit/infra/test_inference_unit_tests.py @@ -142,7 +142,7 @@ def view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: mode="pandas", ) def python_native_test_invalid_pandas_view( - input_dict: Dict[str, Any] + input_dict: Dict[str, Any], ) -> Dict[str, Any]: output_dict: Dict[str, Any] = { "output": input_dict["some_date"], diff --git a/sdk/python/tests/unit/online_store/test_online_retrieval.py b/sdk/python/tests/unit/online_store/test_online_retrieval.py index 926c7226fc..f9ab42ae5e 100644 --- a/sdk/python/tests/unit/online_store/test_online_retrieval.py +++ b/sdk/python/tests/unit/online_store/test_online_retrieval.py @@ -276,7 +276,7 @@ def test_online_to_df(): ) provider = store._get_provider() - for (d, c) in zip(driver_ids, customer_ids): + for d, c in zip(driver_ids, customer_ids): """ driver table: lon lat diff --git a/sdk/python/tests/unit/test_type_map.py b/sdk/python/tests/unit/test_type_map.py index 9b21900e6d..87e5ef0548 100644 --- a/sdk/python/tests/unit/test_type_map.py +++ b/sdk/python/tests/unit/test_type_map.py @@ -43,7 +43,6 @@ def test_null_unix_timestamp_list(): ), ) def test_python_values_to_proto_values_bool(values): - protos = python_values_to_proto_values(values, ValueType.BOOL) converted = feast_value_type_to_python_type(protos[0]) diff --git 
a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py index d8c769f12c..798e82de9b 100644 --- a/sdk/python/tests/utils/e2e_test_validation.py +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -180,7 +180,6 @@ def make_feature_store_yaml( repo_dir_name: Path, offline_creator: DataSourceCreator, ): - offline_store_config = offline_creator.create_offline_store_config() online_store = test_repo_config.online_store diff --git a/sdk/python/tests/utils/feature_records.py b/sdk/python/tests/utils/feature_records.py index 3f210f9e1c..2c26f3c000 100644 --- a/sdk/python/tests/utils/feature_records.py +++ b/sdk/python/tests/utils/feature_records.py @@ -260,7 +260,7 @@ def get_expected_training_df( if "val_to_add" in expected_df.columns: expected_df[ get_response_feature_name("conv_rate_plus_val_to_add", full_feature_names) - ] = (expected_df[conv_feature_name] + expected_df["val_to_add"]) + ] = expected_df[conv_feature_name] + expected_df["val_to_add"] return expected_df @@ -291,7 +291,6 @@ def assert_feature_service_correctness( expected_df, event_timestamp, ): - job_from_df = store.get_historical_features( entity_df=entity_df, features=store.get_feature_service(feature_service.name), diff --git a/setup.cfg b/setup.cfg index 2781169a71..2a9acf13da 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,25 +1,2 @@ -[isort] -src_paths = feast,tests -multi_line_output=3 -include_trailing_comma=True -force_grid_wrap=0 -use_parentheses=True -line_length=88 -skip=feast/protos,feast/embedded_go/lib -known_first_party=feast,feast_serving_server,feast_core_server -default_section=THIRDPARTY - -[flake8] -ignore = E203, E266, E501, W503 -max-line-length = 88 -max-complexity = 20 -select = B,C,E,F,W,T4 -exclude = .git,__pycache__,docs/conf.py,dist,feast/protos,feast/embedded_go/lib - -[mypy] -files=feast,tests -ignore_missing_imports=true -exclude=feast/embedded_go/lib - [bdist_wheel] universal = 1 diff --git a/setup.py b/setup.py index 
ef3ba1d784..e14e723d0e 100644 --- a/setup.py +++ b/setup.py @@ -156,9 +156,7 @@ "build", "virtualenv==20.23.0", "cryptography>=35.0,<43", - "flake8>=6.0.0,<6.1.0", - "black>=22.6.0,<23", - "isort>=5,<6", + "ruff>=0.3.3", "grpcio-testing>=1.56.2,<2", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). "httpx>=0.23.3", From ec4c15c0104fa8f4cebdbf29f9e067baab07b09b Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Wed, 3 Apr 2024 15:46:20 -0400 Subject: [PATCH 105/122] fix: Upgrade sqlalchemy from 1.x to 2.x regarding PVE-2022-51668. (#4065) * fix: Upgrade sqlalchemy from 1.x to 2.x regarding PVE-2022-51668. Signed-off-by: Shuchu Han * fix: fix typo. Signed-off-by: Shuchu Han --------- Signed-off-by: Shuchu Han --- sdk/python/feast/infra/registry/sql.py | 45 ++++++++++++++------------ setup.py | 2 +- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index f9030a6875..98b23e1943 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -205,7 +205,7 @@ def teardown(self): saved_datasets, validation_references, }: - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = delete(t) conn.execute(stmt) @@ -399,7 +399,7 @@ def apply_feature_service( ) def delete_data_source(self, name: str, project: str, commit: bool = True): - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = delete(data_sources).where( data_sources.c.data_source_name == name, data_sources.c.project_id == project, @@ -441,7 +441,7 @@ def _list_on_demand_feature_views(self, project: str) -> List[OnDemandFeatureVie ) def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.project_id == project, ) @@ -449,8 
+449,11 @@ def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: if rows: project_metadata = ProjectMetadata(project_name=project) for row in rows: - if row["metadata_key"] == FeastMetadataKeys.PROJECT_UUID.value: - project_metadata.project_uuid = row["metadata_value"] + if ( + row._mapping["metadata_key"] + == FeastMetadataKeys.PROJECT_UUID.value + ): + project_metadata.project_uuid = row._mapping["metadata_value"] break # TODO(adchia): Add other project metadata in a structured way return [project_metadata] @@ -557,7 +560,7 @@ def apply_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(table).where( getattr(table.c, "feature_view_name") == name, table.c.project_id == project, @@ -612,11 +615,11 @@ def get_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(table).where(getattr(table.c, "feature_view_name") == name) row = conn.execute(stmt).first() if row: - return row["user_metadata"] + return row._mapping["user_metadata"] else: raise FeatureViewNotFoundException(feature_view.name, project=project) @@ -674,7 +677,7 @@ def _apply_object( name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" - with self.engine.connect() as conn: + with self.engine.begin() as conn: update_datetime = datetime.utcnow() update_time = int(update_datetime.timestamp()) stmt = select(table).where( @@ -723,7 +726,7 @@ def _apply_object( def _maybe_init_project_metadata(self, project): # Initialize project metadata if needed - with self.engine.connect() as conn: + with self.engine.begin() as conn: update_datetime = datetime.utcnow() update_time = int(update_datetime.timestamp()) stmt = select(feast_metadata).where( @@ -732,7 +735,7 @@ def _maybe_init_project_metadata(self, 
project): ) row = conn.execute(stmt).first() if row: - usage.set_current_project_uuid(row["metadata_value"]) + usage.set_current_project_uuid(row._mapping["metadata_value"]) else: new_project_uuid = f"{uuid.uuid4()}" values = { @@ -753,7 +756,7 @@ def _delete_object( id_field_name: str, not_found_exception: Optional[Callable], ): - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = delete(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) @@ -777,13 +780,13 @@ def _get_object( ): self._maybe_init_project_metadata(project) - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) row = conn.execute(stmt).first() if row: - _proto = proto_class.FromString(row[proto_field_name]) + _proto = proto_class.FromString(row._mapping[proto_field_name]) return python_class.from_proto(_proto) if not_found_exception: raise not_found_exception(name, project) @@ -799,20 +802,20 @@ def _list_objects( proto_field_name: str, ): self._maybe_init_project_metadata(project) - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(table).where(table.c.project_id == project) rows = conn.execute(stmt).all() if rows: return [ python_class.from_proto( - proto_class.FromString(row[proto_field_name]) + proto_class.FromString(row._mapping[proto_field_name]) ) for row in rows ] return [] def _set_last_updated_metadata(self, last_updated: datetime, project: str): - with self.engine.connect() as conn: + with self.engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -846,7 +849,7 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): conn.execute(insert_stmt) def _get_last_updated_metadata(self, project: str): - with self.engine.connect() as conn: + with self.engine.begin() as 
conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -855,13 +858,13 @@ def _get_last_updated_metadata(self, project: str): row = conn.execute(stmt).first() if not row: return None - update_time = int(row["last_updated_timestamp"]) + update_time = int(row._mapping["last_updated_timestamp"]) return datetime.utcfromtimestamp(update_time) def _get_all_projects(self) -> Set[str]: projects = set() - with self.engine.connect() as conn: + with self.engine.begin() as conn: for table in { entities, data_sources, @@ -872,6 +875,6 @@ def _get_all_projects(self) -> Set[str]: stmt = select(table) rows = conn.execute(stmt).all() for row in rows: - projects.add(row["project_id"]) + projects.add(row._mapping["project_id"]) return projects diff --git a/setup.py b/setup.py index e14e723d0e..f94fb25bb5 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ "pygments>=2.12.0,<3", "PyYAML>=5.4.0,<7", "requests", - "SQLAlchemy[mypy]>1,<2", + "SQLAlchemy[mypy]>1", "tabulate>=0.8.0,<1", "tenacity>=7,<9", "toml>=0.10.0,<1", From c06dda84a26c5df3e761a18adaa81f87b1bcc0de Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Thu, 4 Apr 2024 02:46:37 +0400 Subject: [PATCH 106/122] fix: Disable minio tests in workflows on master and nightly (#4072) disable minio integration tests on github workflows Signed-off-by: tokoko --- .github/fork_workflows/fork_pr_integration_tests_aws.yml | 8 ++++---- .github/fork_workflows/fork_pr_integration_tests_gcp.yml | 4 ++-- .../fork_pr_integration_tests_snowflake.yml | 4 ++-- .github/workflows/master_only.yml | 6 +----- .github/workflows/nightly-ci.yml | 4 +--- 5 files changed, 10 insertions(+), 16 deletions(-) diff --git a/.github/fork_workflows/fork_pr_integration_tests_aws.yml b/.github/fork_workflows/fork_pr_integration_tests_aws.yml index 49fd16ef5d..be75c4f987 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_aws.yml +++ 
b/.github/fork_workflows/fork_pr_integration_tests_aws.yml @@ -153,9 +153,9 @@ jobs: env: FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }} run: | - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "aws and not Snowflake and not BigQuery" - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not Snowflake and not BigQuery" - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "dynamo and not Snowflake and not BigQuery" - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Redshift and not Snowflake and not BigQuery" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "aws and not Snowflake and not BigQuery and not minio_registry" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not Snowflake and not BigQuery and not minio_registry" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "dynamo and not Snowflake and not BigQuery and not minio_registry" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Redshift and not Snowflake and not BigQuery and not minio_registry" diff --git a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml index cf85cc8873..0793fbd6e5 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml +++ 
b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml @@ -96,6 +96,6 @@ jobs: if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak # Run only BigQuery and File tests without dynamo and redshift tests. run: | - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "BigQuery and not dynamo and not Redshift and not Snowflake" - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Snowflake" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "BigQuery and not dynamo and not Redshift and not Snowflake and not minio_registry" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Snowflake and not minio_registry" diff --git a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml index 0b1c8a48ce..b9b6f8df06 100644 --- a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml +++ b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml @@ -92,6 +92,6 @@ jobs: SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} # Run only Snowflake BigQuery and File tests without dynamo and redshift tests. 
run: | - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Snowflake and not dynamo and not Redshift and not Bigquery and not gcp" - pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Bigquery and not gcp" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Snowflake and not dynamo and not Redshift and not Bigquery and not gcp and not minio_registry" + pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Bigquery and not gcp and not minio_registry" diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml index ded5310b50..225f24a828 100644 --- a/.github/workflows/master_only.yml +++ b/.github/workflows/master_only.yml @@ -134,19 +134,15 @@ jobs: - name: Test python and go env: FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-lambda-docker-image.outputs.DOCKER_IMAGE_TAG }} - FEAST_USAGE: "False" - IS_TEST: "True" SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread + run: make test-python-integration - name: Benchmark python env: FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-lambda-docker-image.outputs.DOCKER_IMAGE_TAG }} - FEAST_USAGE: "False" - IS_TEST: "True" SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} 
SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml index e39f2e1c00..4dea41d4ad 100644 --- a/.github/workflows/nightly-ci.yml +++ b/.github/workflows/nightly-ci.yml @@ -214,11 +214,9 @@ jobs: if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak env: FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }} - FEAST_USAGE: "False" - IS_TEST: "True" SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread \ No newline at end of file + run: make test-python-integration \ No newline at end of file From 54c608068a3bf8911546161ceea911c4d2f463af Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Thu, 4 Apr 2024 10:16:16 -0400 Subject: [PATCH 107/122] chore: Adding more tests for On Demand Feature Views (#4069) * checking in progress...trying to fix tests Signed-off-by: Francisco Javier Arceo * testing more... 
Signed-off-by: Francisco Javier Arceo * fixed Signed-off-by: Francisco Javier Arceo * fixed some tests Signed-off-by: Francisco Javier Arceo * fixed test and serialization Signed-off-by: Francisco Javier Arceo * removed commented out code Signed-off-by: Francisco Javier Arceo * lint Signed-off-by: Francisco Javier Arceo * added a test to make it explicit that feature calculation must happen on a list Signed-off-by: Francisco Javier Arceo * linter Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- sdk/python/feast/on_demand_feature_view.py | 13 ++ .../example_repos/example_feature_repo_1.py | 14 ++ .../online_store/test_online_retrieval.py | 11 ++ .../test_on_demand_pandas_transformation.py | 93 ++++++++++ .../test_on_demand_python_transformation.py | 172 ++++++++++++++++++ 5 files changed, 303 insertions(+) create mode 100644 sdk/python/tests/unit/test_on_demand_pandas_transformation.py create mode 100644 sdk/python/tests/unit/test_on_demand_python_transformation.py diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 8a61b11065..71ac6a1b10 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -300,10 +300,23 @@ def from_proto( == "user_defined_function" and on_demand_feature_view_proto.spec.feature_transformation.user_defined_function.body_text != "" + and on_demand_feature_view_proto.spec.mode == "pandas" ): transformation = PandasTransformation.from_proto( on_demand_feature_view_proto.spec.feature_transformation.user_defined_function ) + elif ( + on_demand_feature_view_proto.spec.feature_transformation.WhichOneof( + "transformation" + ) + == "user_defined_function" + and on_demand_feature_view_proto.spec.feature_transformation.user_defined_function.body_text + != "" + and on_demand_feature_view_proto.spec.mode == "python" + ): + transformation = PythonTransformation.from_proto( + 
on_demand_feature_view_proto.spec.feature_transformation.user_defined_function + ) elif ( on_demand_feature_view_proto.spec.feature_transformation.WhichOneof( "transformation" diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index eca9aee57c..fbf1fbb9b0 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -1,6 +1,9 @@ from datetime import timedelta +import pandas as pd + from feast import Entity, FeatureService, FeatureView, Field, FileSource, PushSource +from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Int64, String # Note that file source paths are not validated, so there doesn't actually need to be any data @@ -99,6 +102,17 @@ ) +@on_demand_feature_view( + sources=[customer_profile], + schema=[Field(name="on_demand_age", dtype=Int64)], + mode="pandas", +) +def customer_profile_pandas_odfv(inputs: pd.DataFrame) -> pd.DataFrame: + outputs = pd.DataFrame() + outputs["on_demand_age"] = inputs["age"] + 1 + return outputs + + all_drivers_feature_service = FeatureService( name="driver_locations_service", features=[driver_locations], diff --git a/sdk/python/tests/unit/online_store/test_online_retrieval.py b/sdk/python/tests/unit/online_store/test_online_retrieval.py index f9ab42ae5e..5368b1e11c 100644 --- a/sdk/python/tests/unit/online_store/test_online_retrieval.py +++ b/sdk/python/tests/unit/online_store/test_online_retrieval.py @@ -124,6 +124,17 @@ def test_online() -> None: assert "trips" in result + result = store.get_online_features( + features=["customer_profile_pandas_odfv:on_demand_age"], + entity_rows=[{"driver_id": 1, "customer_id": "5"}], + full_feature_names=False, + ).to_dict() + + assert "on_demand_age" in result + assert result["driver_id"] == [1] + assert result["customer_id"] == ["5"] + assert result["on_demand_age"] == [4] + # invalid table 
reference with pytest.raises(FeatureViewNotFoundException): store.get_online_features( diff --git a/sdk/python/tests/unit/test_on_demand_pandas_transformation.py b/sdk/python/tests/unit/test_on_demand_pandas_transformation.py new file mode 100644 index 0000000000..c5f066dd83 --- /dev/null +++ b/sdk/python/tests/unit/test_on_demand_pandas_transformation.py @@ -0,0 +1,93 @@ +import os +import tempfile +from datetime import datetime, timedelta + +import pandas as pd + +from feast import Entity, FeatureStore, FeatureView, FileSource, RepoConfig +from feast.driver_test_data import create_driver_hourly_stats_df +from feast.field import Field +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + + +def test_pandas_transformation(): + with tempfile.TemporaryDirectory() as data_dir: + store = FeatureStore( + config=RepoConfig( + project="test_on_demand_python_transformation", + registry=os.path.join(data_dir, "registry.db"), + provider="local", + entity_key_serialization_version=2, + online_store=SqliteOnlineStoreConfig( + path=os.path.join(data_dir, "online.db") + ), + ) + ) + + # Generate test data. 
+ end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) + + driver = Entity(name="driver", join_keys=["driver_id"]) + + driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path=driver_stats_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=0), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + ) + + @on_demand_feature_view( + sources=[driver_stats_fv], + schema=[Field(name="conv_rate_plus_acc", dtype=Float64)], + mode="pandas", + ) + def pandas_view(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_acc"] = inputs["conv_rate"] + inputs["acc_rate"] + return df + + store.apply([driver, driver_stats_source, driver_stats_fv, pandas_view]) + + entity_rows = [ + { + "driver_id": 1001, + } + ] + store.write_to_online_store( + feature_view_name="driver_hourly_stats", df=driver_df + ) + + online_response = store.get_online_features( + entity_rows=entity_rows, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "pandas_view:conv_rate_plus_acc", + ], + ).to_df() + + assert online_response["conv_rate_plus_acc"].equals( + online_response["conv_rate"] + online_response["acc_rate"] + ) diff --git a/sdk/python/tests/unit/test_on_demand_python_transformation.py b/sdk/python/tests/unit/test_on_demand_python_transformation.py new file mode 
100644 index 0000000000..4913b6c1b1 --- /dev/null +++ b/sdk/python/tests/unit/test_on_demand_python_transformation.py @@ -0,0 +1,172 @@ +import os +import tempfile +import unittest +from datetime import datetime, timedelta +from typing import Any, Dict + +import pandas as pd +import pytest + +from feast import Entity, FeatureStore, FeatureView, FileSource, RepoConfig +from feast.driver_test_data import create_driver_hourly_stats_df +from feast.field import Field +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + + +class TestOnDemandPythonTransformation(unittest.TestCase): + def setUp(self): + with tempfile.TemporaryDirectory() as data_dir: + self.store = FeatureStore( + config=RepoConfig( + project="test_on_demand_python_transformation", + registry=os.path.join(data_dir, "registry.db"), + provider="local", + entity_key_serialization_version=2, + online_store=SqliteOnlineStoreConfig( + path=os.path.join(data_dir, "online.db") + ), + ) + ) + + # Generate test data. 
+ end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df( + driver_entities, start_date, end_date + ) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet( + path=driver_stats_path, allow_truncated_timestamps=True + ) + + driver = Entity(name="driver", join_keys=["driver_id"]) + + driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path=driver_stats_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=0), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + ) + + @on_demand_feature_view( + sources=[driver_stats_fv], + schema=[Field(name="conv_rate_plus_acc_pandas", dtype=Float64)], + mode="pandas", + ) + def pandas_view(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_acc_pandas"] = ( + inputs["conv_rate"] + inputs["acc_rate"] + ) + return df + + @on_demand_feature_view( + sources=[driver_stats_fv[["conv_rate", "acc_rate"]]], + schema=[Field(name="conv_rate_plus_acc_python", dtype=Float64)], + mode="python", + ) + def python_view(inputs: Dict[str, Any]) -> Dict[str, Any]: + output: Dict[str, Any] = { + "conv_rate_plus_acc_python": [ + conv_rate + acc_rate + for conv_rate, acc_rate in zip( + inputs["conv_rate"], inputs["acc_rate"] + ) + ] + } + return output + + @on_demand_feature_view( + sources=[driver_stats_fv[["conv_rate", "acc_rate"]]], + schema=[ + Field(name="conv_rate_plus_acc_python_singleton", dtype=Float64) + ], + mode="python", + ) + def python_singleton_view(inputs: Dict[str, Any]) -> Dict[str, Any]: + output: Dict[str, Any] = 
dict(conv_rate_plus_acc_python=float("-inf")) + output["conv_rate_plus_acc_python_singleton"] = ( + inputs["conv_rate"] + inputs["acc_rate"] + ) + return output + + with pytest.raises(TypeError): + # Note the singleton view will fail as the type is + # expected to be a List which can be confirmed in _infer_features_dict + self.store.apply( + [ + driver, + driver_stats_source, + driver_stats_fv, + pandas_view, + python_view, + python_singleton_view, + ] + ) + + self.store.apply( + [driver, driver_stats_source, driver_stats_fv, pandas_view, python_view] + ) + self.store.write_to_online_store( + feature_view_name="driver_hourly_stats", df=driver_df + ) + + def test_python_pandas_parity(self): + entity_rows = [ + { + "driver_id": 1001, + } + ] + + online_python_response = self.store.get_online_features( + entity_rows=entity_rows, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "python_view:conv_rate_plus_acc_python", + ], + ).to_dict() + + online_pandas_response = self.store.get_online_features( + entity_rows=entity_rows, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "pandas_view:conv_rate_plus_acc_pandas", + ], + ).to_df() + + assert len(online_python_response) == 4 + assert all( + key in online_python_response.keys() + for key in [ + "driver_id", + "acc_rate", + "conv_rate", + "conv_rate_plus_acc_python", + ] + ) + assert len(online_python_response["conv_rate_plus_acc_python"]) == 1 + assert ( + online_python_response["conv_rate_plus_acc_python"][0] + == online_pandas_response["conv_rate_plus_acc_pandas"][0] + == online_python_response["conv_rate"][0] + + online_python_response["acc_rate"][0] + ) From f3a9c649a85f8aea7a57c055bb00f58295738b3b Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Thu, 4 Apr 2024 10:40:58 -0400 Subject: [PATCH 108/122] chore: Update pull_request_template.md (#4074) Update pull_request_template.md --- .github/pull_request_template.md | 11 ++++++++--- 1 file 
changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index e8d00798c0..4bec4d79e1 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -9,11 +9,16 @@ --> -**What this PR does / why we need it**: +# What this PR does / why we need it: + -**Which issue(s) this PR fixes**: +# Which issue(s) this PR fixes: -Fixes # + + +# Fixes From c50a9ff783fa400542422990ff835da930bcb6bf Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Thu, 4 Apr 2024 22:49:16 +0400 Subject: [PATCH 109/122] feat: Refactor ODFV schema inference (#4076) * refactor odfv scheam inference Signed-off-by: tokoko * bugfix odfv schema inference Signed-off-by: tokoko * remove print statement Signed-off-by: tokoko --------- Signed-off-by: tokoko --- sdk/python/feast/on_demand_feature_view.py | 217 +++++------------- .../transformation/pandas_transformation.py | 20 ++ .../transformation/python_transformation.py | 19 +- .../substrait_transformation.py | 53 +++++ .../feature_repos/universal/feature_views.py | 2 +- 5 files changed, 144 insertions(+), 167 deletions(-) diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 71ac6a1b10..9d1c360cb3 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -34,10 +34,6 @@ from feast.transformation.pandas_transformation import PandasTransformation from feast.transformation.python_transformation import PythonTransformation from feast.transformation.substrait_transformation import SubstraitTransformation -from feast.type_map import ( - feast_value_type_to_pandas_type, - python_type_to_feast_value_type, -) from feast.usage import log_exceptions from feast.value_type import ValueType @@ -490,69 +486,15 @@ def get_transformed_features( ) def infer_features(self) -> None: - if self.mode in {"pandas", "substrait"}: - self._infer_features_df() - elif self.mode == 
"python": - self._infer_features_dict() - else: - raise Exception( - f'Invalid OnDemandFeatureMode: {self.mode}. Expected one of "pandas" or "python".' - ) - - def _infer_features_dict(self): - """ - Infers the set of features associated to this feature view from the input source. - - Raises: - RegistryInferenceFailure: The set of features could not be inferred. - """ - rand_dict_value: Dict[str, Any] = { - "float": [1.0], - "int": [1], - "str": ["hello world"], - "bytes": [str.encode("hello world")], - "bool": [True], - "datetime64[ns]": [datetime.utcnow()], - } - - feature_dict = {} - for feature_view_projection in self.source_feature_view_projections.values(): - for feature in feature_view_projection.features: - dtype = feast_value_type_to_pandas_type(feature.dtype.to_value_type()) - feature_dict[f"{feature_view_projection.name}__{feature.name}"] = ( - rand_dict_value[dtype] if dtype in rand_dict_value else [None] - ) - feature_dict[f"{feature.name}"] = ( - rand_dict_value[dtype] if dtype in rand_dict_value else [None] - ) - for request_data in self.source_request_sources.values(): - for field in request_data.schema: - dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) - feature_dict[f"{field.name}"] = ( - rand_dict_value[dtype] if dtype in rand_dict_value else [None] - ) - - output_dict: Dict[str, List[Any]] = self.feature_transformation.transform( - feature_dict + inferred_features = self.feature_transformation.infer_features( + self._construct_random_input() ) - inferred_features = [] - for f, dt in output_dict.items(): - inferred_features.append( - Field( - name=f, - dtype=from_value_type( - python_type_to_feast_value_type( - f, type_name=type(dt[0]).__name__ - ) - ), - ) - ) if self.features: missing_features = [] - for specified_features in self.features: - if specified_features not in inferred_features: - missing_features.append(specified_features) + for specified_feature in self.features: + if specified_feature not in inferred_features: 
+ missing_features.append(specified_feature) if missing_features: raise SpecifiedFeaturesNotPresentError( missing_features, inferred_features, self.name @@ -566,66 +508,42 @@ def _infer_features_dict(self): f"Could not infer Features for the feature view '{self.name}'.", ) - def _infer_features_df(self) -> None: - """ - Infers the set of features associated to this feature view from the input source. - - Raises: - RegistryInferenceFailure: The set of features could not be inferred. - """ - rand_df_value: Dict[str, Any] = { - "float": 1.0, - "int": 1, - "str": "hello world", - "bytes": str.encode("hello world"), - "bool": True, - "datetime64[ns]": datetime.utcnow(), + def _construct_random_input(self) -> Dict[str, List[Any]]: + rand_dict_value: Dict[ValueType, List[Any]] = { + ValueType.BYTES: [str.encode("hello world")], + ValueType.STRING: ["hello world"], + ValueType.INT32: [1], + ValueType.INT64: [1], + ValueType.DOUBLE: [1.0], + ValueType.FLOAT: [1.0], + ValueType.BOOL: [True], + ValueType.UNIX_TIMESTAMP: [datetime.utcnow()], + ValueType.BYTES_LIST: [[str.encode("hello world")]], + ValueType.STRING_LIST: [["hello world"]], + ValueType.INT32_LIST: [[1]], + ValueType.INT64_LIST: [[1]], + ValueType.DOUBLE_LIST: [[1.0]], + ValueType.FLOAT_LIST: [[1.0]], + ValueType.BOOL_LIST: [[True]], + ValueType.UNIX_TIMESTAMP_LIST: [[datetime.utcnow()]], } - df = pd.DataFrame() + feature_dict = {} for feature_view_projection in self.source_feature_view_projections.values(): for feature in feature_view_projection.features: - dtype = feast_value_type_to_pandas_type(feature.dtype.to_value_type()) - df[f"{feature_view_projection.name}__{feature.name}"] = pd.Series( - dtype=dtype + feature_dict[f"{feature_view_projection.name}__{feature.name}"] = ( + rand_dict_value.get(feature.dtype.to_value_type(), [None]) + ) + feature_dict[f"{feature.name}"] = rand_dict_value.get( + feature.dtype.to_value_type(), [None] ) - sample_val = rand_df_value[dtype] if dtype in rand_df_value else None - 
df[f"{feature.name}"] = pd.Series(data=sample_val, dtype=dtype) for request_data in self.source_request_sources.values(): for field in request_data.schema: - dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) - sample_val = rand_df_value[dtype] if dtype in rand_df_value else None - df[f"{field.name}"] = pd.Series(sample_val, dtype=dtype) - - output_df: pd.DataFrame = self.feature_transformation.transform(df) - inferred_features = [] - for f, dt in zip(output_df.columns, output_df.dtypes): - inferred_features.append( - Field( - name=f, - dtype=from_value_type( - python_type_to_feast_value_type(f, type_name=str(dt)) - ), + feature_dict[f"{field.name}"] = rand_dict_value.get( + field.dtype.to_value_type(), [None] ) - ) - if self.features: - missing_features = [] - for specified_features in self.features: - if specified_features not in inferred_features: - missing_features.append(specified_features) - if missing_features: - raise SpecifiedFeaturesNotPresentError( - missing_features, inferred_features, self.name - ) - else: - self.features = inferred_features - - if not self.features: - raise RegistryInferenceFailure( - "OnDemandFeatureView", - f"Could not infer Features for the feature view '{self.name}'.", - ) + return feature_dict @staticmethod def get_requested_odfvs( @@ -682,59 +600,28 @@ def mainify(obj) -> None: def decorator(user_function): return_annotation = inspect.signature(user_function).return_annotation - if ( - return_annotation - and return_annotation.__module__ == "ibis.expr.types.relations" - and return_annotation.__name__ == "Table" - ): - import ibis - import ibis.expr.datatypes as dt - from ibis_substrait.compiler.core import SubstraitCompiler - - compiler = SubstraitCompiler() - - input_fields: Field = [] - - for s in sources: - if isinstance(s, FeatureView): - fields = s.projection.features - else: - fields = s.features - - input_fields.extend( - [ - ( - f.name, - dt.dtype( - 
feast_value_type_to_pandas_type(f.dtype.to_value_type()) - ), - ) - for f in fields - ] + udf_string = dill.source.getsource(user_function) + mainify(user_function) + if mode == "pandas": + if return_annotation not in (inspect._empty, pd.DataFrame): + raise TypeError( + f"return signature for {user_function} is {return_annotation} but should be pd.DataFrame" ) + transformation = PandasTransformation(user_function, udf_string) + elif mode == "python": + if return_annotation not in (inspect._empty, Dict[str, Any]): + raise TypeError( + f"return signature for {user_function} is {return_annotation} but should be Dict[str, Any]" + ) + transformation = PythonTransformation(user_function, udf_string) + elif mode == "substrait": + from ibis.expr.types.relations import Table - expr = user_function(ibis.table(input_fields, "t")) - - transformation = SubstraitTransformation( - substrait_plan=compiler.compile(expr).SerializeToString() - ) - else: - udf_string = dill.source.getsource(user_function) - mainify(user_function) - if mode == "pandas": - if return_annotation not in (inspect._empty, pd.DataFrame): - raise TypeError( - f"return signature for {user_function} is {return_annotation} but should be pd.DataFrame" - ) - transformation = PandasTransformation(user_function, udf_string) - elif mode == "python": - if return_annotation not in (inspect._empty, Dict[str, Any]): - raise TypeError( - f"return signature for {user_function} is {return_annotation} but should be Dict[str, Any]" - ) - transformation = PythonTransformation(user_function, udf_string) - elif mode == "substrait": - pass + if return_annotation not in (inspect._empty, Table): + raise TypeError( + f"return signature for {user_function} is {return_annotation} but should be ibis.expr.types.relations.Table" + ) + transformation = SubstraitTransformation.from_ibis(user_function, sources) on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, diff --git 
a/sdk/python/feast/transformation/pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py index 1838a882f2..d48055c694 100644 --- a/sdk/python/feast/transformation/pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -1,11 +1,16 @@ from types import FunctionType +from typing import Any, Dict, List import dill import pandas as pd +from feast.field import Field, from_value_type from feast.protos.feast.core.Transformation_pb2 import ( UserDefinedFunctionV2 as UserDefinedFunctionProto, ) +from feast.type_map import ( + python_type_to_feast_value_type, +) class PandasTransformation: @@ -33,6 +38,21 @@ def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: ) return output_df + def infer_features(self, random_input: Dict[str, List[Any]]) -> List[Field]: + df = pd.DataFrame.from_dict(random_input) + + output_df: pd.DataFrame = self.transform(df) + + return [ + Field( + name=f, + dtype=from_value_type( + python_type_to_feast_value_type(f, type_name=str(dt)) + ), + ) + for f, dt in zip(output_df.columns, output_df.dtypes) + ] + def __eq__(self, other): if not isinstance(other, PandasTransformation): raise TypeError( diff --git a/sdk/python/feast/transformation/python_transformation.py b/sdk/python/feast/transformation/python_transformation.py index 9519f23c05..9f5fac6675 100644 --- a/sdk/python/feast/transformation/python_transformation.py +++ b/sdk/python/feast/transformation/python_transformation.py @@ -1,11 +1,15 @@ from types import FunctionType -from typing import Dict +from typing import Any, Dict, List import dill +from feast.field import Field, from_value_type from feast.protos.feast.core.Transformation_pb2 import ( UserDefinedFunctionV2 as UserDefinedFunctionProto, ) +from feast.type_map import ( + python_type_to_feast_value_type, +) class PythonTransformation: @@ -33,6 +37,19 @@ def transform(self, input_dict: Dict) -> Dict: ) return {**input_dict, **output_dict} + def infer_features(self, 
random_input: Dict[str, List[Any]]) -> List[Field]: + output_dict: Dict[str, List[Any]] = self.transform(random_input) + + return [ + Field( + name=f, + dtype=from_value_type( + python_type_to_feast_value_type(f, type_name=type(dt[0]).__name__) + ), + ) + for f, dt in output_dict.items() + ] + def __eq__(self, other): if not isinstance(other, PythonTransformation): raise TypeError( diff --git a/sdk/python/feast/transformation/substrait_transformation.py b/sdk/python/feast/transformation/substrait_transformation.py index b3dbe7a4b4..48b708ac70 100644 --- a/sdk/python/feast/transformation/substrait_transformation.py +++ b/sdk/python/feast/transformation/substrait_transformation.py @@ -1,10 +1,18 @@ +from typing import Any, Dict, List + import pandas as pd import pyarrow import pyarrow.substrait as substrait # type: ignore # noqa +from feast.feature_view import FeatureView +from feast.field import Field, from_value_type from feast.protos.feast.core.Transformation_pb2 import ( SubstraitTransformationV2 as SubstraitTransformationProto, ) +from feast.type_map import ( + feast_value_type_to_pandas_type, + python_type_to_feast_value_type, +) class SubstraitTransformation: @@ -26,6 +34,20 @@ def table_provider(names, schema: pyarrow.Schema): ).read_all() return table.to_pandas() + def infer_features(self, random_input: Dict[str, List[Any]]) -> List[Field]: + df = pd.DataFrame.from_dict(random_input) + output_df: pd.DataFrame = self.transform(df) + + return [ + Field( + name=f, + dtype=from_value_type( + python_type_to_feast_value_type(f, type_name=str(dt)) + ), + ) + for f, dt in zip(output_df.columns, output_df.dtypes) + ] + def __eq__(self, other): if not isinstance(other, SubstraitTransformation): raise TypeError( @@ -48,3 +70,34 @@ def from_proto( return SubstraitTransformation( substrait_plan=substrait_transformation_proto.substrait_plan ) + + @classmethod + def from_ibis(cls, user_function, sources): + import ibis + import ibis.expr.datatypes as dt + from 
ibis_substrait.compiler.core import SubstraitCompiler + + compiler = SubstraitCompiler() + + input_fields = [] + + for s in sources: + fields = s.projection.features if isinstance(s, FeatureView) else s.features + + input_fields.extend( + [ + ( + f.name, + dt.dtype( + feast_value_type_to_pandas_type(f.dtype.to_value_type()) + ), + ) + for f in fields + ] + ) + + expr = user_function(ibis.table(input_fields, "t")) + + return SubstraitTransformation( + substrait_plan=compiler.compile(expr).SerializeToString() + ) diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 55d2ed8425..48f6e27b8a 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -127,7 +127,7 @@ def create_similarity_request_source(): return RequestSource( name="similarity_input", schema=[ - Field(name="vector_doube", dtype=Array(Float64)), + Field(name="vector_double", dtype=Array(Float64)), Field(name="vector_float", dtype=Array(Float32)), ], ) From 194305631bbb6cca251dbb46df5b5575ffb2391b Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Fri, 5 Apr 2024 15:09:36 -0400 Subject: [PATCH 110/122] feat: Bumping requirements (#4079) Signed-off-by: Francisco Javier Arceo --- infra/charts/feast/requirements.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index ec098f2f7b..601d9a7a84 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.35.0 + version: 0.36.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.35.0 + version: 0.36.0 condition: 
transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis From 1ba65b4e13a2af3e9cea879d1c1e48891a0f0610 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Fri, 5 Apr 2024 17:02:02 -0400 Subject: [PATCH 111/122] revert: Reverting bumping requirements (#4081) Revert "feat: Bumping requirements (#4079)" This reverts commit 194305631bbb6cca251dbb46df5b5575ffb2391b. --- infra/charts/feast/requirements.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 601d9a7a84..ec098f2f7b 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.36.0 + version: 0.35.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.36.0 + version: 0.35.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis From 318a2b8bfc94f10c81206071fcb1d41f19683288 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Tue, 9 Apr 2024 03:07:19 +0400 Subject: [PATCH 112/122] feat: Pull duckdb from contribs, add to CI (#4059) * pull duckdb/ibis from contrib folder Signed-off-by: tokoko * fix linter failures Signed-off-by: tokoko --------- Signed-off-by: tokoko --- Makefile | 12 ---- docs/project/development-guide.md | 2 +- environment-setup.md | 2 +- .../contrib/duckdb_offline_store/__init__.py | 0 .../contrib/duckdb_repo_configuration.py | 19 ------ .../contrib/ibis_offline_store/__init__.py | 0 .../duckdb_offline_store => }/duckdb.py | 2 +- .../{contrib/ibis_offline_store => }/ibis.py | 23 ++++++-- sdk/python/feast/repo_config.py | 2 +- .../feature_repos/repo_configuration.py | 2 + .../universal/data_sources/file.py | 8 +++ 
.../test_universal_historical_retrieval.py | 2 +- .../unit/infra/offline_stores/test_ibis.py | 58 +++++++++++++++---- 13 files changed, 80 insertions(+), 52 deletions(-) delete mode 100644 sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py delete mode 100644 sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py delete mode 100644 sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py rename sdk/python/feast/infra/offline_stores/{contrib/duckdb_offline_store => }/duckdb.py (82%) rename sdk/python/feast/infra/offline_stores/{contrib/ibis_offline_store => }/ibis.py (95%) diff --git a/Makefile b/Makefile index 26e79cf6a9..813a27f4e3 100644 --- a/Makefile +++ b/Makefile @@ -176,18 +176,6 @@ test-python-universal-athena: not s3_registry and \ not test_snowflake" \ sdk/python/tests - -test-python-universal-duckdb: - PYTHONPATH='.' \ - FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.duckdb_repo_configuration \ - python -m pytest -n 8 --integration \ - -k "not test_nullable_online_store and \ - not gcs_registry and \ - not s3_registry and \ - not test_snowflake and \ - not bigquery and \ - not test_spark_materialization_consistency" \ - sdk/python/tests test-python-universal-postgres-offline: PYTHONPATH='.' \ diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index 28baa789bb..2d4ab0c7c6 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -187,7 +187,7 @@ make lint-python ### Unit Tests Unit tests (`pytest`) for the Feast Python SDK / CLI can run as follows: ```sh -make test-python +make test-python-unit ``` > :warning: Local configuration can interfere with Unit tests and cause them to fail: diff --git a/environment-setup.md b/environment-setup.md index a6c30c2aa2..5dde9dfd94 100644 --- a/environment-setup.md +++ b/environment-setup.md @@ -19,5 +19,5 @@ make install-python-ci-dependencies PYTHON=3.9 4. 
start the docker daemon 5. run unit tests: ```bash -make test-python +make test-python-unit ``` \ No newline at end of file diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py deleted file mode 100644 index 263ae97466..0000000000 --- a/sdk/python/feast/infra/offline_stores/contrib/duckdb_repo_configuration.py +++ /dev/null @@ -1,19 +0,0 @@ -from feast.infra.offline_stores.contrib.duckdb_offline_store.duckdb import ( - DuckDBOfflineStoreConfig, -) -from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 - FileDataSourceCreator, -) - - -class DuckDBDataSourceCreator(FileDataSourceCreator): - def create_offline_store_config(self): - self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() - return self.duckdb_offline_store_config - - -AVAILABLE_OFFLINE_STORES = [ - ("local", DuckDBDataSourceCreator), -] - -AVAILABLE_ONLINE_STORES = {"sqlite": ({"type": "sqlite"}, None)} diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py b/sdk/python/feast/infra/offline_stores/duckdb.py similarity index 82% rename from sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py rename to sdk/python/feast/infra/offline_stores/duckdb.py index f927f2ff92..d43286f371 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/duckdb_offline_store/duckdb.py +++ b/sdk/python/feast/infra/offline_stores/duckdb.py @@ -1,7 +1,7 @@ import ibis from 
pydantic import StrictStr -from feast.infra.offline_stores.contrib.ibis_offline_store.ibis import IbisOfflineStore +from feast.infra.offline_stores.ibis import IbisOfflineStore from feast.repo_config import FeastConfigBaseModel diff --git a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py b/sdk/python/feast/infra/offline_stores/ibis.py similarity index 95% rename from sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py rename to sdk/python/feast/infra/offline_stores/ibis.py index 37fc6a4718..f9c6b2d20b 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/ibis_offline_store/ibis.py +++ b/sdk/python/feast/infra/offline_stores/ibis.py @@ -134,7 +134,9 @@ def get_historical_features( entity_table, feature_views, event_timestamp_col ) - def read_fv(feature_view, feature_refs, full_feature_names): + def read_fv( + feature_view: FeatureView, feature_refs: List[str], full_feature_names: bool + ) -> Tuple: fv_table: Table = ibis.read_parquet(feature_view.batch_source.name) for old_name, new_name in feature_view.batch_source.field_mapping.items(): @@ -175,6 +177,7 @@ def read_fv(feature_view, feature_refs, full_feature_names): return ( fv_table, feature_view.batch_source.timestamp_field, + feature_view.batch_source.created_timestamp_column, feature_view.projection.join_key_map or {e.name: e.name for e in feature_view.entity_columns}, feature_refs, @@ -351,12 +354,19 @@ def metadata(self) -> Optional[RetrievalMetadata]: def point_in_time_join( entity_table: Table, - feature_tables: List[Tuple[Table, str, Dict[str, str], List[str], timedelta]], + feature_tables: List[Tuple[Table, str, str, Dict[str, str], List[str], timedelta]], event_timestamp_col="event_timestamp", ): # TODO handle ttl all_entities = [event_timestamp_col] - for feature_table, timestamp_field, join_key_map, _, _ in feature_tables: + for ( + feature_table, + timestamp_field, + created_timestamp_field, + join_key_map, + _, + _, + ) in feature_tables: 
all_entities.extend(join_key_map.values()) r = ibis.literal("") @@ -371,6 +381,7 @@ def point_in_time_join( for ( feature_table, timestamp_field, + created_timestamp_field, join_key_map, feature_refs, ttl, @@ -395,9 +406,13 @@ def point_in_time_join( feature_table = feature_table.drop(s.endswith("_y")) + order_by_fields = [ibis.desc(feature_table[timestamp_field])] + if created_timestamp_field: + order_by_fields.append(ibis.desc(feature_table[created_timestamp_field])) + feature_table = ( feature_table.group_by(by="entity_row_id") - .order_by(ibis.desc(feature_table[timestamp_field])) + .order_by(order_by_fields) .mutate(rn=ibis.row_number()) ) diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 5708754622..fe3491c6fe 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -75,7 +75,7 @@ "postgres": "feast.infra.offline_stores.contrib.postgres_offline_store.postgres.PostgreSQLOfflineStore", "athena": "feast.infra.offline_stores.contrib.athena_offline_store.athena.AthenaOfflineStore", "mssql": "feast.infra.offline_stores.contrib.mssql_offline_store.mssql.MsSqlServerOfflineStore", - "duckdb": "feast.infra.offline_stores.contrib.duckdb_offline_store.duckdb.DuckDBOfflineStore", + "duckdb": "feast.infra.offline_stores.duckdb.DuckDBOfflineStore", } FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE = { diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index d2450bf868..6eb5204161 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -31,6 +31,7 @@ BigQueryDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.file import ( + DuckDBDataSourceCreator, FileDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.redshift import ( @@ -108,6 +109,7 @@ AVAILABLE_OFFLINE_STORES: 
List[Tuple[str, Type[DataSourceCreator]]] = [ ("local", FileDataSourceCreator), + ("local", DuckDBDataSourceCreator), ] AVAILABLE_ONLINE_STORES: Dict[ diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index c70dae9863..6d4baa19ed 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -15,6 +15,7 @@ from feast.data_format import ParquetFormat from feast.data_source import DataSource from feast.feature_logging import LoggingDestination +from feast.infra.offline_stores.duckdb import DuckDBOfflineStoreConfig from feast.infra.offline_stores.file import FileOfflineStoreConfig from feast.infra.offline_stores.file_source import ( FileLoggingDestination, @@ -214,3 +215,10 @@ def create_offline_store_config(self) -> FeastConfigBaseModel: def teardown(self): self.minio.stop() self.f.close() + + +# TODO split up DataSourceCreator and OfflineStoreCreator +class DuckDBDataSourceCreator(FileDataSourceCreator): + def create_offline_store_config(self): + self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() + return self.duckdb_offline_store_config diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 9baba2397b..7e106b3e2a 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -518,7 +518,7 @@ def test_historical_features_with_no_ttl( @pytest.mark.integration @pytest.mark.universal_offline_stores -def test_historical_features_from_bigquery_sources_containing_backfills(environment): +def test_historical_features_containing_backfills(environment): store = environment.feature_store now = 
datetime.now().replace(microsecond=0, second=0, minute=0) diff --git a/sdk/python/tests/unit/infra/offline_stores/test_ibis.py b/sdk/python/tests/unit/infra/offline_stores/test_ibis.py index 5f105e2af7..fea1399552 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_ibis.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_ibis.py @@ -3,10 +3,9 @@ import ibis import pyarrow as pa +import pyarrow.compute as pc -from feast.infra.offline_stores.contrib.ibis_offline_store.ibis import ( - point_in_time_join, -) +from feast.infra.offline_stores.ibis import point_in_time_join def pa_datetime(year, month, day): @@ -16,12 +15,13 @@ def pa_datetime(year, month, day): def customer_table(): return pa.Table.from_arrays( arrays=[ - pa.array([1, 1, 2]), + pa.array([1, 1, 2, 3]), pa.array( [ pa_datetime(2024, 1, 1), pa_datetime(2024, 1, 2), pa_datetime(2024, 1, 1), + pa_datetime(2024, 1, 3), ] ), ], @@ -32,24 +32,38 @@ def customer_table(): def features_table_1(): return pa.Table.from_arrays( arrays=[ - pa.array([1, 1, 1, 2]), + pa.array([1, 1, 1, 2, 3, 3]), pa.array( [ pa_datetime(2023, 12, 31), pa_datetime(2024, 1, 2), pa_datetime(2024, 1, 3), pa_datetime(2023, 1, 3), + pa_datetime(2024, 1, 1), + pa_datetime(2024, 1, 1), ] ), - pa.array([11, 22, 33, 22]), + pa.array( + [ + pa_datetime(2023, 12, 31), + pa_datetime(2024, 1, 2), + pa_datetime(2024, 1, 3), + pa_datetime(2023, 1, 3), + pa_datetime(2024, 1, 3), + pa_datetime(2024, 1, 2), + ] + ), + pa.array([11, 22, 33, 22, 10, 20]), ], - names=["customer_id", "event_timestamp", "feature1"], + names=["customer_id", "event_timestamp", "created", "feature1"], ) def point_in_time_join_brute( entity_table: pa.Table, - feature_tables: List[Tuple[pa.Table, str, Dict[str, str], List[str], timedelta]], + feature_tables: List[ + Tuple[pa.Table, str, str, Dict[str, str], List[str], timedelta] + ], event_timestamp_col="event_timestamp", ): ret_fields = [entity_table.schema.field(n) for n in entity_table.schema.names] @@ -63,6 +77,7 
@@ def point_in_time_join_brute( for ( feature_table, timestamp_key, + created_timestamp_key, join_key_map, feature_refs, ttl, @@ -72,7 +87,9 @@ def point_in_time_join_brute( [ feature_table.schema.field(f) for f in feature_table.schema.names - if f not in join_key_map.values() and f != timestamp_key + if f not in join_key_map.values() + and f != timestamp_key + and f != created_timestamp_key ] ) @@ -82,9 +99,11 @@ def check_equality(ft_dict, batch_dict, x, y): ) ft_dict = feature_table.to_pydict() + found_matches = [ - (j, ft_dict[timestamp_key][j]) - for j in range(entity_table.num_rows) + (j, (ft_dict[timestamp_key][j], ft_dict[created_timestamp_key][j])) + # (j, ft_dict[timestamp_key][j]) + for j in range(feature_table.num_rows) if check_equality(ft_dict, batch_dict, j, i) and ft_dict[timestamp_key][j] <= row_timestmap and ft_dict[timestamp_key][j] >= row_timestmap - ttl @@ -93,6 +112,7 @@ def check_equality(ft_dict, batch_dict, x, y): index_found = ( max(found_matches, key=itemgetter(1))[0] if found_matches else None ) + for col in ft_dict.keys(): if col not in feature_refs: continue @@ -108,6 +128,18 @@ def check_equality(ft_dict, batch_dict, x, y): return pa.Table.from_pydict(ret, schema=pa.schema(ret_fields)) +def tables_equal_ignore_order(actual: pa.Table, expected: pa.Table): + sort_keys = [(name, "ascending") for name in actual.column_names] + sort_indices = pc.sort_indices(actual, sort_keys) + actual = pc.take(actual, sort_indices) + + sort_keys = [(name, "ascending") for name in expected.column_names] + sort_indices = pc.sort_indices(expected, sort_keys) + expected = pc.take(expected, sort_indices) + + return actual.equals(expected) + + def test_point_in_time_join(): expected = point_in_time_join_brute( customer_table(), @@ -115,6 +147,7 @@ def test_point_in_time_join(): ( features_table_1(), "event_timestamp", + "created", {"customer_id": "customer_id"}, ["feature1"], timedelta(days=10), @@ -128,6 +161,7 @@ def test_point_in_time_join(): ( 
ibis.memtable(features_table_1()), "event_timestamp", + "created", {"customer_id": "customer_id"}, ["feature1"], timedelta(days=10), @@ -135,4 +169,4 @@ def test_point_in_time_join(): ], ).to_pyarrow() - assert actual.equals(expected) + assert tables_equal_ignore_order(actual, expected) From 7617bdb7f4222edb69893c37621bd87b940b3227 Mon Sep 17 00:00:00 2001 From: Harry Date: Fri, 12 Apr 2024 05:57:49 +0700 Subject: [PATCH 113/122] feat: Kubernetes materialization engine written based on bytewax (#4087) * feat: Kubernetes materialization engine written based on bytewax Signed-off-by: Harry * fix: Resolve incorrect path Signed-off-by: Harry * fix: Simplify engine name Signed-off-by: Harry --------- Signed-off-by: Harry --- .../materialization/kubernetes/Dockerfile | 22 + .../materialization/kubernetes/__init__.py | 0 .../kubernetes/k8s_materialization_engine.py | 421 ++++++++++++++++++ .../kubernetes/k8s_materialization_job.py | 62 +++ .../kubernetes/k8s_materialization_task.py | 10 + .../infra/materialization/kubernetes/main.py | 85 ++++ sdk/python/feast/repo_config.py | 1 + .../materialization/kubernetes/README.md | 22 + .../kubernetes/eks-config.yaml | 13 + .../materialization/kubernetes/test_k8s.py | 65 +++ setup.py | 10 +- 11 files changed, 707 insertions(+), 4 deletions(-) create mode 100644 sdk/python/feast/infra/materialization/kubernetes/Dockerfile create mode 100644 sdk/python/feast/infra/materialization/kubernetes/__init__.py create mode 100644 sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_engine.py create mode 100644 sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_job.py create mode 100644 sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_task.py create mode 100644 sdk/python/feast/infra/materialization/kubernetes/main.py create mode 100644 sdk/python/tests/integration/materialization/kubernetes/README.md create mode 100644 
sdk/python/tests/integration/materialization/kubernetes/eks-config.yaml create mode 100644 sdk/python/tests/integration/materialization/kubernetes/test_k8s.py diff --git a/sdk/python/feast/infra/materialization/kubernetes/Dockerfile b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile new file mode 100644 index 0000000000..956287a1d6 --- /dev/null +++ b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.9-slim-bullseye AS build + +RUN apt-get update && \ + apt-get install --no-install-suggests --no-install-recommends --yes git + +WORKDIR /app + +COPY sdk/python/feast/infra/materialization/kuberentes/main.py /app + +# Copy necessary parts of the Feast codebase +COPY sdk/python sdk/python +COPY protos protos +COPY go go +COPY setup.py setup.py +COPY pyproject.toml pyproject.toml +COPY README.md README.md + +# We need this mount thingy because setuptools_scm needs access to the +# git dir to infer the version of feast we're installing. +# https://github.com/pypa/setuptools_scm#usage-from-docker +# I think it also assumes that this dockerfile is being built from the root of the directory. 
+RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir '.[aws,gcp,k8s,snowflake,postgres]' diff --git a/sdk/python/feast/infra/materialization/kubernetes/__init__.py b/sdk/python/feast/infra/materialization/kubernetes/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_engine.py b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_engine.py new file mode 100644 index 0000000000..2e7129b037 --- /dev/null +++ b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_engine.py @@ -0,0 +1,421 @@ +import logging +import uuid +from datetime import datetime +from time import sleep +from typing import Callable, List, Literal, Sequence, Union + +import yaml +from kubernetes import client, utils +from kubernetes import config as k8s_config +from kubernetes.client.exceptions import ApiException +from kubernetes.utils import FailToCreateError +from pydantic import StrictStr +from tqdm import tqdm + +from feast import FeatureView, RepoConfig +from feast.batch_feature_view import BatchFeatureView +from feast.entity import Entity +from feast.infra.materialization.batch_materialization_engine import ( + BatchMaterializationEngine, + MaterializationJob, + MaterializationJobStatus, + MaterializationTask, +) +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.online_stores.online_store import OnlineStore +from feast.infra.registry.base_registry import BaseRegistry +from feast.repo_config import FeastConfigBaseModel +from feast.stream_feature_view import StreamFeatureView +from feast.utils import _get_column_names + +from .k8s_materialization_job import KubernetesMaterializationJob + +logger = logging.getLogger(__name__) + + +class KubernetesMaterializationEngineConfig(FeastConfigBaseModel): + """Batch Materialization Engine config for Kubernetes""" + + type: Literal["k8s"] = "k8s" + """ Materialization type 
selector""" + + namespace: StrictStr = "default" + """ (optional) The namespace in Kubernetes to use when creating services, configuration maps and jobs. + """ + + image: StrictStr = "feast/feast-k8s-materialization:latest" + """ (optional) The container image to use when running the materialization job.""" + + env: List[dict] = [] + """ (optional) A list of environment variables to set in the created Kubernetes pods. + These environment variables can be used to reference Kubernetes secrets. + """ + + image_pull_secrets: List[dict] = [] + """ (optional) The secrets to use when pulling the image to run for the materialization job """ + + resources: dict = {} + """ (optional) The resource requests and limits for the materialization containers """ + + service_account_name: StrictStr = "" + """ (optional) The service account name to use when running the job """ + + annotations: dict = {} + """ (optional) Annotations to apply to the job container. Useful for linking the service account to IAM roles, operational metadata, etc """ + + include_security_context_capabilities: bool = True + """ (optional) Include security context capabilities in the init and job container spec """ + + labels: dict = {} + """ (optional) additional labels to append to kubernetes objects """ + + max_parallelism: int = 10 + """ (optional) Maximum number of pods allowed to run in parallel within a single job""" + + synchronous: bool = False + """ (optional) If true, wait for materialization for one feature to complete before moving to the next """ + + retry_limit: int = 2 + """ (optional) Maximum number of times to retry a materialization worker pod""" + + mini_batch_size: int = 1000 + """ (optional) Number of rows to process per write operation (default 1000)""" + + active_deadline_seconds: int = 86400 + """ (optional) Maximum amount of time a materialization job is allowed to run""" + + job_batch_size: int = 100 + """ (optional) Maximum number of pods to process per job. 
Only applies to synchronous materialization""" + + print_pod_logs_on_failure: bool = True + """(optional) Print pod logs on job failure. Only applies to synchronous materialization""" + + +class KubernetesMaterializationEngine(BatchMaterializationEngine): + def __init__( + self, + *, + repo_config: RepoConfig, + offline_store: OfflineStore, + online_store: OnlineStore, + **kwargs, + ): + super().__init__( + repo_config=repo_config, + offline_store=offline_store, + online_store=online_store, + **kwargs, + ) + self.repo_config = repo_config + self.offline_store = offline_store + self.online_store = online_store + + k8s_config.load_config() + + self.k8s_client = client.api_client.ApiClient() + self.v1 = client.CoreV1Api(self.k8s_client) + self.batch_v1 = client.BatchV1Api(self.k8s_client) + self.batch_engine_config = repo_config.batch_engine + self.namespace = self.batch_engine_config.namespace + + def update( + self, + project: str, + views_to_delete: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + views_to_keep: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + ): + """This method ensures that any necessary infrastructure or resources needed by the + engine are set up ahead of materialization.""" + pass + + def teardown_infra( + self, + project: str, + fvs: Sequence[Union[BatchFeatureView, StreamFeatureView, FeatureView]], + entities: Sequence[Entity], + ): + """This method ensures that any infrastructure or resources set up by ``update()``are torn down.""" + pass + + def materialize( + self, + registry: BaseRegistry, + tasks: List[MaterializationTask], + ) -> List[MaterializationJob]: + return [ + self._materialize_one( + registry, + task.feature_view, + task.start_time, + task.end_time, + task.project, + task.tqdm_builder, + ) + for task in tasks + ] + + def _materialize_one( + self, + registry: BaseRegistry, + feature_view: 
Union[BatchFeatureView, StreamFeatureView, FeatureView], + start_date: datetime, + end_date: datetime, + project: str, + tqdm_builder: Callable[[int], tqdm], + ): + entities = [] + for entity_name in feature_view.entities: + entities.append(registry.get_entity(entity_name, project)) + + ( + join_key_columns, + feature_name_columns, + timestamp_field, + created_timestamp_column, + ) = _get_column_names(feature_view, entities) + + offline_job = self.offline_store.pull_latest_from_table_or_query( + config=self.repo_config, + data_source=feature_view.batch_source, + join_key_columns=join_key_columns, + feature_name_columns=feature_name_columns, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + start_date=start_date, + end_date=end_date, + ) + + paths = offline_job.to_remote_storage() + if self.batch_engine_config.synchronous: + offset = 0 + total_pods = len(paths) + batch_size = self.batch_engine_config.job_batch_size + if batch_size < 1: + raise ValueError("job_batch_size must be a value greater than 0") + if batch_size < self.batch_engine_config.max_parallelism: + logger.warning( + "job_batch_size is less than max_parallelism. 
Setting job_batch_size = max_parallelism" + ) + batch_size = self.batch_engine_config.max_parallelism + + while True: + next_offset = min(offset + batch_size, total_pods) + job = self._await_path_materialization( + paths[offset:next_offset], + feature_view, + offset, + next_offset, + total_pods, + ) + offset += batch_size + if ( + offset >= total_pods + or job.status() == MaterializationJobStatus.ERROR + ): + break + else: + job_id = str(uuid.uuid4()) + job = self._create_kubernetes_job(job_id, paths, feature_view) + + return job + + def _await_path_materialization( + self, paths, feature_view, batch_start, batch_end, total_pods + ): + job_id = str(uuid.uuid4()) + job = self._create_kubernetes_job(job_id, paths, feature_view) + + try: + while job.status() in ( + MaterializationJobStatus.WAITING, + MaterializationJobStatus.RUNNING, + ): + logger.info( + f"{feature_view.name} materialization for pods {batch_start}-{batch_end} " + f"(of {total_pods}) running..." + ) + sleep(30) + logger.info( + f"{feature_view.name} materialization for pods {batch_start}-{batch_end} " + f"(of {total_pods}) complete with status {job.status()}" + ) + except BaseException as e: + logger.info(f"Deleting job {job.job_id()}") + try: + self.batch_v1.delete_namespaced_job(job.job_id(), self.namespace) + except ApiException as ae: + logger.warning(f"Could not delete job due to API Error: {ae.body}") + raise e + finally: + logger.info(f"Deleting configmap {self._configmap_name(job_id)}") + try: + self.v1.delete_namespaced_config_map( + self._configmap_name(job_id), self.namespace + ) + except ApiException as ae: + logger.warning( + f"Could not delete configmap due to API Error: {ae.body}" + ) + + if ( + job.status() == MaterializationJobStatus.ERROR + and self.batch_engine_config.print_pod_logs_on_failure + ): + self._print_pod_logs(job.job_id(), feature_view, batch_start) + + return job + + def _print_pod_logs(self, job_id, feature_view, offset=0): + pods_list = self.v1.list_namespaced_pod( + 
namespace=self.namespace, + label_selector=f"job-name={job_id}", + ).items + for i, pod in enumerate(pods_list): + logger.info(f"Logging output for {feature_view.name} pod {offset+i}") + try: + logger.info( + self.v1.read_namespaced_pod_log(pod.metadata.name, self.namespace) + ) + except ApiException as e: + logger.warning(f"Could not retrieve pod logs due to: {e.body}") + + def _create_kubernetes_job(self, job_id, paths, feature_view): + try: + # Create a k8s configmap with information needed by pods + self._create_configuration_map(job_id, paths, feature_view, self.namespace) + + # Create the k8s job definition + self._create_job_definition( + job_id=job_id, + namespace=self.namespace, + pods=len(paths), # Create a pod for each parquet file + env=self.batch_engine_config.env, + ) + job = KubernetesMaterializationJob(job_id, self.namespace) + logger.info(f"Created job `{job.job_id()}` on namespace `{self.namespace}`") + return job + except FailToCreateError as failures: + return KubernetesMaterializationJob(job_id, self.namespace, error=failures) + + def _create_configuration_map(self, job_id, paths, feature_view, namespace): + """Create a Kubernetes configmap for this job""" + + feature_store_configuration = yaml.dump(self.repo_config.dict(by_alias=True)) + + materialization_config = yaml.dump( + {"paths": paths, "feature_view": feature_view.name} + ) + + labels = {"feast-materializer": "configmap"} + configmap_manifest = { + "kind": "ConfigMap", + "apiVersion": "v1", + "metadata": { + "name": self._configmap_name(job_id), + "labels": {**labels, **self.batch_engine_config.labels}, + }, + "data": { + "feature_store.yaml": feature_store_configuration, + "materialization_config.yaml": materialization_config, + }, + } + self.v1.create_namespaced_config_map( + namespace=namespace, + body=configmap_manifest, + ) + + def _configmap_name(self, job_id): + return f"feast-{job_id}" + + def _create_job_definition(self, job_id, namespace, pods, env, index_offset=0): + 
"""Create a kubernetes job definition.""" + job_env = [ + { + "name": "MINI_BATCH_SIZE", + "value": str(self.batch_engine_config.mini_batch_size), + }, + ] + # Add any Feast configured environment variables + job_env.extend(env) + + securityContextCapabilities = None + if self.batch_engine_config.include_security_context_capabilities: + securityContextCapabilities = { + "add": ["NET_BIND_SERVICE"], + "drop": ["ALL"], + } + + job_labels = {"feast-materializer": "job"} + pod_labels = {"feast-materializer": "pod"} + job_definition = { + "apiVersion": "batch/v1", + "kind": "Job", + "metadata": { + "name": f"feast-materialization-{job_id}", + "namespace": namespace, + "labels": {**job_labels, **self.batch_engine_config.labels}, + }, + "spec": { + "ttlSecondsAfterFinished": 3600, + "backoffLimit": self.batch_engine_config.retry_limit, + "completions": pods, + "parallelism": min(pods, self.batch_engine_config.max_parallelism), + "activeDeadlineSeconds": self.batch_engine_config.active_deadline_seconds, + "completionMode": "Indexed", + "template": { + "metadata": { + "annotations": self.batch_engine_config.annotations, + "labels": {**pod_labels, **self.batch_engine_config.labels}, + }, + "spec": { + "restartPolicy": "Never", + "subdomain": f"feast-materialization-{job_id}", + "imagePullSecrets": self.batch_engine_config.image_pull_secrets, + "serviceAccountName": self.batch_engine_config.service_account_name, + "containers": [ + { + "command": ["python", "main.py"], + "env": job_env, + "image": self.batch_engine_config.image, + "imagePullPolicy": "Always", + "name": "feast", + "resources": self.batch_engine_config.resources, + "securityContext": { + "allowPrivilegeEscalation": False, + "capabilities": securityContextCapabilities, + "readOnlyRootFilesystem": False, + }, + "terminationMessagePath": "/dev/termination-log", + "terminationMessagePolicy": "File", + "volumeMounts": [ + { + "mountPath": "/var/feast/", + "name": self._configmap_name(job_id), + }, + ], + } + ], + 
"volumes": [ + { + "configMap": { + "defaultMode": 420, + "name": self._configmap_name(job_id), + }, + "name": "python-files", + }, + { + "configMap": {"name": self._configmap_name(job_id)}, + "name": self._configmap_name(job_id), + }, + ], + }, + }, + }, + } + utils.create_from_dict(self.k8s_client, job_definition) diff --git a/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_job.py b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_job.py new file mode 100644 index 0000000000..612b20155d --- /dev/null +++ b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_job.py @@ -0,0 +1,62 @@ +from typing import Optional + +from kubernetes import client + +from feast.infra.materialization.batch_materialization_engine import ( + MaterializationJob, + MaterializationJobStatus, +) + + +class KubernetesMaterializationJob(MaterializationJob): + def __init__( + self, + job_id: str, + namespace: str, + error: Optional[BaseException] = None, + ): + super().__init__() + self._job_id = job_id + self.namespace = namespace + self._error: Optional[BaseException] = error + self.batch_v1 = client.BatchV1Api() + + def error(self): + return self._error + + def status(self): + if self._error is not None: + return MaterializationJobStatus.ERROR + else: + job_status = self.batch_v1.read_namespaced_job_status( + self.job_id(), self.namespace + ).status + if job_status.active is not None: + if job_status.completion_time is None: + return MaterializationJobStatus.RUNNING + else: + if ( + job_status.completion_time is not None + and job_status.conditions[0].type == "Complete" + ): + return MaterializationJobStatus.SUCCEEDED + + if ( + job_status.conditions is not None + and job_status.conditions[0].type == "Failed" + ): + self._error = Exception( + f"Job {self.job_id()} failed with reason: " + f"{job_status.conditions[0].message}" + ) + return MaterializationJobStatus.ERROR + return MaterializationJobStatus.WAITING + + def 
should_be_retried(self): + return False + + def job_id(self): + return f"feast-materialization-{self._job_id}" + + def url(self): + return None diff --git a/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_task.py b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_task.py new file mode 100644 index 0000000000..607dcb5b26 --- /dev/null +++ b/sdk/python/feast/infra/materialization/kubernetes/k8s_materialization_task.py @@ -0,0 +1,10 @@ +from feast.infra.materialization.batch_materialization_engine import MaterializationTask + + +class KubernetesMaterializationTask(MaterializationTask): + def __init__(self, project, feature_view, start_date, end_date, tqdm): + self.project = project + self.feature_view = feature_view + self.start_date = start_date + self.end_date = end_date + self.tqdm = tqdm diff --git a/sdk/python/feast/infra/materialization/kubernetes/main.py b/sdk/python/feast/infra/materialization/kubernetes/main.py new file mode 100644 index 0000000000..d80cad3edb --- /dev/null +++ b/sdk/python/feast/infra/materialization/kubernetes/main.py @@ -0,0 +1,85 @@ +import logging +import os +from typing import List + +import pyarrow as pa +import pyarrow.parquet as pq +import yaml + +from feast import FeatureStore, FeatureView, RepoConfig +from feast.utils import _convert_arrow_to_proto, _run_pyarrow_field_mapping + +logger = logging.getLogger(__name__) +DEFAULT_BATCH_SIZE = 1000 + + +class KubernetesMaterializer: + def __init__( + self, + config: RepoConfig, + feature_view: FeatureView, + paths: List[str], + worker_index: int, + ): + self.config = config + self.feature_store = FeatureStore(config=config) + + self.feature_view = feature_view + self.worker_index = worker_index + self.paths = paths + self.mini_batch_size = int(os.getenv("MINI_BATCH_SIZE", DEFAULT_BATCH_SIZE)) + + def process_path(self, path): + logger.info(f"Processing path {path}") + dataset = pq.ParquetDataset(path, use_legacy_dataset=False) + batches = [] + 
for fragment in dataset.fragments: + for batch in fragment.to_table().to_batches( + max_chunksize=self.mini_batch_size + ): + batches.append(batch) + return batches + + def run(self): + for mini_batch in self.process_path(self.paths[self.worker_index]): + table: pa.Table = pa.Table.from_batches([mini_batch]) + + if self.feature_view.batch_source.field_mapping is not None: + table = _run_pyarrow_field_mapping( + table, self.feature_view.batch_source.field_mapping + ) + join_key_to_value_type = { + entity.name: entity.dtype.to_value_type() + for entity in self.feature_view.entity_columns + } + rows_to_write = _convert_arrow_to_proto( + table, self.feature_view, join_key_to_value_type + ) + self.feature_store._get_provider().online_write_batch( + config=self.config, + table=self.feature_view, + data=rows_to_write, + progress=None, + ) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + + with open("/var/feast/feature_store.yaml") as f: + feast_config = yaml.load(f, Loader=yaml.Loader) + + with open("/var/feast/materialization_config.yaml") as b: + materialization_cfg = yaml.load(b, Loader=yaml.Loader) + + config = RepoConfig(**feast_config) + store = FeatureStore(config=config) + + KubernetesMaterializer( + config=config, + feature_view=store.get_feature_view( + materialization_cfg["feature_view"] + ), + paths=materialization_cfg["paths"], + worker_index=int(os.environ["JOB_COMPLETION_INDEX"]), + ).run() diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index fe3491c6fe..26ada13fc5 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -47,6 +47,7 @@ "snowflake.engine": "feast.infra.materialization.snowflake_engine.SnowflakeMaterializationEngine", "lambda": "feast.infra.materialization.aws_lambda.lambda_engine.LambdaMaterializationEngine", "bytewax": "feast.infra.materialization.contrib.bytewax.bytewax_materialization_engine.BytewaxMaterializationEngine", + "k8s": 
"feast.infra.materialization.kubernetes.kubernetes_materialization_engine.KubernetesMaterializationEngine", "spark.engine": "feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationEngine", } diff --git a/sdk/python/tests/integration/materialization/kubernetes/README.md b/sdk/python/tests/integration/materialization/kubernetes/README.md new file mode 100644 index 0000000000..715258c1cd --- /dev/null +++ b/sdk/python/tests/integration/materialization/kubernetes/README.md @@ -0,0 +1,22 @@ +# Running kubernetes engine integration tests + +To run the kubernetes engine integration tests, you'll need to provision a cluster using [eksctl.](https://docs.aws.amazon.com/eks/latest/userguide/eksctl.html). + +## Creating an EKS cluster + +In this directory is a configuration file for a single-node EKS cluster + +To create the EKS cluster needed for testing, issue the following command: + +``` shell +> eksctl create cluster -f ./eks-config.yaml +``` + +When the tests are complete, delete the created cluster with: + +``` shell +> eksctl delete cluster feast-cluster +``` + + + diff --git a/sdk/python/tests/integration/materialization/kubernetes/eks-config.yaml b/sdk/python/tests/integration/materialization/kubernetes/eks-config.yaml new file mode 100644 index 0000000000..b1ecb7ef69 --- /dev/null +++ b/sdk/python/tests/integration/materialization/kubernetes/eks-config.yaml @@ -0,0 +1,13 @@ +apiVersion: eksctl.io/v1alpha5 +kind: ClusterConfig + +metadata: + name: feast-cluster + version: "1.22" + region: us-west-2 + +managedNodeGroups: +- name: ng-1 + instanceType: c6a.large + desiredCapacity: 1 + privateNetworking: true diff --git a/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py b/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py new file mode 100644 index 0000000000..a944ae3e94 --- /dev/null +++ b/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py @@ -0,0 +1,65 @@ +from datetime import 
timedelta + +import pytest + +from feast import Entity, Feature, FeatureView, ValueType +from tests.data.data_creator import create_basic_driver_dataset +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, + RegistryLocation, +) +from tests.integration.feature_repos.repo_configuration import ( + construct_test_environment, +) +from tests.integration.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, +) +from tests.utils.e2e_test_validation import validate_offline_online_store_consistency + + +@pytest.mark.integration +@pytest.mark.skip(reason="Run this test manually after creating an EKS cluster.") +def test_kubernetes_materialization(): + config = IntegrationTestRepoConfig( + provider="aws", + online_store={"type": "dynamodb", "region": "us-west-2"}, + offline_store_creator=RedshiftDataSourceCreator, + batch_engine={"type": "k8s"}, + registry_location=RegistryLocation.S3, + ) + env = construct_test_environment(config, None) + + df = create_basic_driver_dataset() + ds = env.data_source_creator.create_data_source( + df, + env.feature_store.project, + field_mapping={"ts_1": "ts"}, + ) + + fs = env.feature_store + driver = Entity( + name="driver_id", + join_key="driver_id", + value_type=ValueType.INT64, + ) + + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(weeks=52), + features=[Feature(name="value", dtype=ValueType.FLOAT)], + batch_source=ds, + ) + + try: + fs.apply([driver, driver_stats_fv]) + + # materialization is run in two steps and + # we use timestamp from generated dataframe as a split point + split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) + + print(f"Split datetime: {split_dt}") + + validate_offline_online_store_consistency(fs, driver_stats_fv, split_dt) + finally: + fs.teardown() diff --git a/setup.py b/setup.py index f94fb25bb5..9f1676524f 100644 --- a/setup.py +++ b/setup.py @@ -93,6 +93,8 @@ 
BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] +KUBERNETES_REQUIRED = ["kubernetes<=20.13.0"] + SNOWFLAKE_REQUIRED = [ "snowflake-connector-python[pandas]>=3.7,<4", ] @@ -147,9 +149,7 @@ "grpcio-health-checking>=1.56.2,<2", ] -DUCKDB_REQUIRED = [ - "ibis-framework[duckdb]" -] +DUCKDB_REQUIRED = ["ibis-framework[duckdb]"] CI_REQUIRED = ( [ @@ -197,6 +197,7 @@ + REDIS_REQUIRED + AWS_REQUIRED + BYTEWAX_REQUIRED + + KUBERNETES_REQUIRED + SNOWFLAKE_REQUIRED + SPARK_REQUIRED + POSTGRES_REQUIRED @@ -359,6 +360,7 @@ def run(self): "gcp": GCP_REQUIRED, "aws": AWS_REQUIRED, "bytewax": BYTEWAX_REQUIRED, + "k8s": KUBERNETES_REQUIRED, "redis": REDIS_REQUIRED, "snowflake": SNOWFLAKE_REQUIRED, "spark": SPARK_REQUIRED, @@ -374,7 +376,7 @@ def run(self): "grpcio": GRPCIO_REQUIRED, "rockset": ROCKSET_REQUIRED, "ibis": IBIS_REQUIRED, - "duckdb": DUCKDB_REQUIRED + "duckdb": DUCKDB_REQUIRED, }, include_package_data=True, license="Apache", From 9efb243c548b075ca8288e04b09b84a9fa49dc7c Mon Sep 17 00:00:00 2001 From: Tommy Hughes IV Date: Thu, 11 Apr 2024 18:17:24 -0500 Subject: [PATCH 114/122] fix: Feature Server image startup in OpenShift clusters (#4096) create necessary feature server dir(s) during image build Signed-off-by: Tommy Hughes --- sdk/python/feast/infra/feature_servers/multicloud/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index 5d2d425cda..1a55c6e851 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -18,4 +18,5 @@ RUN apt install -y -V ca-certificates lsb-release wget RUN wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb RUN apt install -y -V 
./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb RUN apt update -RUN apt -y install libarrow-dev \ No newline at end of file +RUN apt -y install libarrow-dev +RUN mkdir -m 775 /.cache \ No newline at end of file From 3c6ce86813d7bfded81efe7c898a2b346bc40c7a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 02:00:53 +0000 Subject: [PATCH 115/122] chore: Bump idna from 3.6 to 3.7 in /sdk/python/requirements (#4097) Bumps [idna](https://github.com/kjd/idna) from 3.6 to 3.7. - [Release notes](https://github.com/kjd/idna/releases) - [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst) - [Commits](https://github.com/kjd/idna/compare/v3.6...v3.7) --- updated-dependencies: - dependency-name: idna dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index af7a87c11b..235c4dfb72 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -321,7 +321,7 @@ ibis-substrait==3.2.0 # via feast (setup.py) identify==2.5.35 # via pre-commit -idna==3.6 +idna==3.7 # via # anyio # httpx diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 6603171d45..aa55cffd5f 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -54,7 +54,7 @@ h11==0.14.0 # via uvicorn httptools==0.6.1 # via uvicorn -idna==3.6 +idna==3.7 
# via # anyio # requests diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 53c7fd1bce..717e82361a 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -321,7 +321,7 @@ ibis-substrait==3.2.0 # via feast (setup.py) identify==2.5.35 # via pre-commit -idna==3.6 +idna==3.7 # via # anyio # httpx diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 3b8f555ca7..5d1b681405 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -54,7 +54,7 @@ h11==0.14.0 # via uvicorn httptools==0.6.1 # via uvicorn -idna==3.6 +idna==3.7 # via # anyio # requests From ec19036fcc4c77084a2dd5aae5576f8f43393eba Mon Sep 17 00:00:00 2001 From: Hao Xu Date: Sun, 14 Apr 2024 22:58:18 -0700 Subject: [PATCH 116/122] feat: Enable Vector database and retrieve_online_documents API (#4061) * feat: add document store * feat: add document store * feat: add document store * feat: add document store * remove DocumentStore * format * format * format * format * format * format * remove unused vars * add test * add test * format * format * format * format * format * fix not implemented issue * fix not implemented issue * fix test * format * format * format * format * format * format * update testcontainer * format * fix postgres integration test * format * fix postgres test * fix postgres test * fix postgres test * fix postgres test * fix postgres test * format * format * format --- Makefile | 2 +- sdk/python/feast/feature_store.py | 103 ++++++++++++++++++ sdk/python/feast/infra/key_encoding_utils.py | 8 ++ .../tests/data_source.py | 4 + .../infra/online_stores/contrib/postgres.py | 97 ++++++++++++++++- .../contrib/postgres_repo_configuration.py | 16 ++- .../feast/infra/online_stores/online_store.py | 27 +++++ .../feast/infra/passthrough_provider.py | 17 
+++ sdk/python/feast/infra/provider.py | 24 ++++ sdk/python/tests/conftest.py | 16 ++- sdk/python/tests/data/data_creator.py | 19 ++++ sdk/python/tests/foo_provider.py | 10 ++ .../universal/online_store/postgres.py | 68 ++++++++++++ .../online_store/test_universal_online.py | 18 ++- setup.py | 2 +- 15 files changed, 419 insertions(+), 12 deletions(-) create mode 100644 sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py diff --git a/Makefile b/Makefile index 813a27f4e3..6fcf95dc7d 100644 --- a/Makefile +++ b/Makefile @@ -200,7 +200,7 @@ test-python-universal-postgres-offline: test-python-universal-postgres-online: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.postgres_repo_configuration \ - PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.postgres_offline_store.tests \ + PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.postgres \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 62ce9d6e38..15598e1d60 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1690,6 +1690,72 @@ def _get_online_features( ) return OnlineResponse(online_features_response) + @log_exceptions_and_usage + def retrieve_online_documents( + self, + feature: str, + query: Union[str, List[float]], + top_k: int, + ) -> OnlineResponse: + """ + Retrieves the top k closest document features. Note, embeddings are a subset of features. + + Args: + feature: The list of document features that should be retrieved from the online document store. These features can be + specified either as a list of string document feature references or as a feature service. String feature + references must have format "feature_view:feature", e.g, "document_fv:document_embeddings". 
+ query: The query to retrieve the closest document features for. + top_k: The number of closest document features to retrieve. + """ + return self._retrieve_online_documents( + feature=feature, + query=query, + top_k=top_k, + ) + + def _retrieve_online_documents( + self, + feature: str, + query: Union[str, List[float]], + top_k: int, + ): + if isinstance(query, str): + raise ValueError( + "Using embedding functionality is not supported for document retrieval. Please embed the query before calling retrieve_online_documents." + ) + ( + requested_feature_views, + _, + ) = self._get_feature_views_to_use( + features=[feature], allow_cache=True, hide_dummy_entity=False + ) + requested_feature = ( + feature.split(":")[1] if isinstance(feature, str) else feature + ) + provider = self._get_provider() + document_features = self._retrieve_from_online_store( + provider, + requested_feature_views[0], + requested_feature, + query, + top_k, + ) + document_feature_vals = [feature[2] for feature in document_features] + document_feature_distance_vals = [feature[3] for feature in document_features] + online_features_response = GetOnlineFeaturesResponse(results=[]) + + # TODO Refactor to better way of populating result + # TODO populate entity in the response after returning entity in document_features is supported + self._populate_result_rows_from_columnar( + online_features_response=online_features_response, + data={requested_feature: document_feature_vals}, + ) + self._populate_result_rows_from_columnar( + online_features_response=online_features_response, + data={"distance": document_feature_distance_vals}, + ) + return OnlineResponse(online_features_response) + @staticmethod def _get_columnar_entity_values( rowise: Optional[List[Dict[str, Any]]], columnar: Optional[Dict[str, List[Any]]] @@ -1906,6 +1972,43 @@ def _read_from_online_store( read_row_protos.append((event_timestamps, statuses, values)) return read_row_protos + def _retrieve_from_online_store( + self, + provider: 
Provider, + table: FeatureView, + requested_feature: str, + query: List[float], + top_k: int, + ) -> List[Tuple[Timestamp, "FieldStatus.ValueType", Value, Value]]: + """ + Search and return document features from the online document store. + """ + documents = provider.retrieve_online_documents( + config=self.config, + table=table, + requested_feature=requested_feature, + query=query, + top_k=top_k, + ) + + read_row_protos = [] + row_ts_proto = Timestamp() + + for row_ts, feature_val, distance_val in documents: + # Reset timestamp to default or update if row_ts is not None + if row_ts is not None: + row_ts_proto.FromDatetime(row_ts) + + if feature_val is None or distance_val is None: + feature_val = Value() + distance_val = Value() + status = FieldStatus.NOT_FOUND + else: + status = FieldStatus.PRESENT + + read_row_protos.append((row_ts_proto, status, feature_val, distance_val)) + return read_row_protos + @staticmethod def _populate_response_from_feature_data( feature_data: Iterable[ diff --git a/sdk/python/feast/infra/key_encoding_utils.py b/sdk/python/feast/infra/key_encoding_utils.py index 62b6b72724..e50e438c3d 100644 --- a/sdk/python/feast/infra/key_encoding_utils.py +++ b/sdk/python/feast/infra/key_encoding_utils.py @@ -72,3 +72,11 @@ def serialize_entity_key( output.append(val_bytes) return b"".join(output) + + +def get_val_str(val): + accept_value_types = ["float_list_val", "double_list_val", "int_list_val"] + for accept_type in accept_value_types: + if val.HasField(accept_type): + return str(getattr(val, accept_type).val) + return None diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index f50cdc4c41..a23d90e186 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ 
-7,6 +7,7 @@ from testcontainers.core.waiting_utils import wait_for_logs from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.contrib.postgres_offline_store.postgres import ( PostgreSQLOfflineStoreConfig, PostgreSQLSource, @@ -57,6 +58,9 @@ def postgres_container(): class PostgreSQLDataSourceCreator(DataSourceCreator, OnlineStoreCreator): + def create_logged_features_destination(self) -> LoggingDestination: + return None # type: ignore + def __init__( self, project_name: str, fixture_request: pytest.FixtureRequest, **kwargs ): diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index 308528aaec..2dcb618783 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -2,7 +2,7 @@ import logging from collections import defaultdict from datetime import datetime -from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union import psycopg2 import pytz @@ -12,7 +12,7 @@ from feast import Entity from feast.feature_view import FeatureView -from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.key_encoding_utils import get_val_str, serialize_entity_key from feast.infra.online_stores.online_store import OnlineStore from feast.infra.utils.postgres.connection_utils import _get_conn, _get_connection_pool from feast.infra.utils.postgres.postgres_config import ConnectionType, PostgreSQLConfig @@ -25,6 +25,12 @@ class PostgreSQLOnlineStoreConfig(PostgreSQLConfig): type: Literal["postgres"] = "postgres" + # Whether to enable the pgvector extension for vector similarity search + pgvector_enabled: Optional[bool] = False + + # If pgvector is enabled, the length of the vector field + vector_len: Optional[int] = 512 + class 
PostgreSQLOnlineStore(OnlineStore): _conn: Optional[psycopg2._psycopg.connection] = None @@ -68,11 +74,19 @@ def online_write_batch( created_ts = _to_naive_utc(created_ts) for feature_name, val in values.items(): + val_str: Union[str, bytes] + if ( + "pgvector_enabled" in config.online_config + and config.online_config["pgvector_enabled"] + ): + val_str = get_val_str(val) + else: + val_str = val.SerializeToString() insert_values.append( ( entity_key_bin, feature_name, - val.SerializeToString(), + val_str, timestamp, created_ts, ) @@ -212,6 +226,12 @@ def update( for table in tables_to_keep: table_name = _table_id(project, table) + value_type = "BYTEA" + if ( + "pgvector_enabled" in config.online_config + and config.online_config["pgvector_enabled"] + ): + value_type = f'vector({config.online_config["vector_len"]})' cur.execute( sql.SQL( """ @@ -219,7 +239,7 @@ def update( ( entity_key BYTEA, feature_name TEXT, - value BYTEA, + value {}, event_ts TIMESTAMPTZ, created_ts TIMESTAMPTZ, PRIMARY KEY(entity_key, feature_name) @@ -228,6 +248,7 @@ def update( """ ).format( sql.Identifier(table_name), + sql.SQL(value_type), sql.Identifier(f"{table_name}_ek"), sql.Identifier(table_name), ) @@ -251,6 +272,74 @@ def teardown( logging.exception("Teardown failed") raise + def retrieve_online_documents( + self, + config: RepoConfig, + table: FeatureView, + requested_feature: str, + embedding: List[float], + top_k: int, + ) -> List[Tuple[Optional[datetime], Optional[ValueProto], Optional[ValueProto]]]: + """ + + Args: + config: Feast configuration object + table: FeatureView object as the table to search + requested_feature: The requested feature as the column to search + embedding: The query embedding to search for + top_k: The number of items to return + Returns: + List of tuples containing the event timestamp and the document feature + + """ + project = config.project + + # Convert the embedding to a string to be used in postgres vector search + query_embedding_str = 
f"[{','.join(str(el) for el in embedding)}]" + + result: List[ + Tuple[Optional[datetime], Optional[ValueProto], Optional[ValueProto]] + ] = [] + with self._get_conn(config) as conn, conn.cursor() as cur: + table_name = _table_id(project, table) + + # Search query template to find the top k items that are closest to the given embedding + # SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5; + cur.execute( + sql.SQL( + """ + SELECT + entity_key, + feature_name, + value, + value <-> %s as distance, + event_ts FROM {table_name} + WHERE feature_name = {feature_name} + ORDER BY distance + LIMIT {top_k}; + """ + ).format( + table_name=sql.Identifier(table_name), + feature_name=sql.Literal(requested_feature), + top_k=sql.Literal(top_k), + ), + (query_embedding_str,), + ) + rows = cur.fetchall() + + for entity_key, feature_name, value, distance, event_ts in rows: + # TODO Deserialize entity_key to return the entity in response + # entity_key_proto = EntityKeyProto() + # entity_key_proto_bin = bytes(entity_key) + + # TODO Convert to List[float] for value type proto + feature_value_proto = ValueProto(string_val=value) + + distance_value_proto = ValueProto(float_val=distance) + result.append((event_ts, feature_value_proto, distance_value_proto)) + + return result + def _table_id(project: str, table: FeatureView) -> str: return f"{project}_{table.name}" diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres_repo_configuration.py b/sdk/python/feast/infra/online_stores/contrib/postgres_repo_configuration.py index 2a9f0d54cd..6e4ca3f950 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres_repo_configuration.py @@ -1,10 +1,18 @@ -from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import ( - PostgreSQLDataSourceCreator, -) from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) +from 
tests.integration.feature_repos.universal.online_store.postgres import ( + PGVectorOnlineStoreCreator, + PostgresOnlineStoreCreator, +) FULL_REPO_CONFIGS = [ - IntegrationTestRepoConfig(online_store_creator=PostgreSQLDataSourceCreator), + IntegrationTestRepoConfig( + online_store="postgres", online_store_creator=PostgresOnlineStoreCreator + ), + IntegrationTestRepoConfig( + online_store="pgvector", online_store_creator=PGVectorOnlineStoreCreator + ), ] + +AVAILABLE_ONLINE_STORES = {"pgvector": PGVectorOnlineStoreCreator} diff --git a/sdk/python/feast/infra/online_stores/online_store.py b/sdk/python/feast/infra/online_stores/online_store.py index fcc3376dce..fc1b3d4ad3 100644 --- a/sdk/python/feast/infra/online_stores/online_store.py +++ b/sdk/python/feast/infra/online_stores/online_store.py @@ -134,3 +134,30 @@ def teardown( entities: Entities whose corresponding infrastructure should be deleted. """ pass + + def retrieve_online_documents( + self, + config: RepoConfig, + table: FeatureView, + requested_feature: str, + embedding: List[float], + top_k: int, + ) -> List[Tuple[Optional[datetime], Optional[ValueProto], Optional[ValueProto]]]: + """ + Retrieves online feature values for the specified embeddings. + + Args: + config: The config for the current feature store. + table: The feature view whose feature values should be read. + requested_feature: The name of the feature whose embeddings should be used for retrieval. + embedding: The embeddings to use for retrieval. + top_k: The number of nearest neighbors to retrieve. + + Returns: + object: A list of top k closest documents to the specified embedding. Each item in the list is a tuple + where the first item is the event timestamp for the row, and the second item is a dict of feature + name to embeddings. 
+ """ + raise NotImplementedError( + f"Online store {self.__class__.__name__} does not support online retrieval" + ) diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index aca18f4856..ec4df66d43 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -190,6 +190,23 @@ def online_read( ) return result + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.001)) + def retrieve_online_documents( + self, + config: RepoConfig, + table: FeatureView, + requested_feature: str, + query: List[float], + top_k: int, + ) -> List: + set_usage_attribute("provider", self.__class__.__name__) + result = [] + if self.online_store: + result = self.online_store.retrieve_online_documents( + config, table, requested_feature, query, top_k + ) + return result + def ingest_df( self, feature_view: FeatureView, diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 2a9670cace..e71e87488d 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -295,6 +295,30 @@ def get_feature_server_endpoint(self) -> Optional[str]: """Returns endpoint for the feature server, if it exists.""" return None + @abstractmethod + def retrieve_online_documents( + self, + config: RepoConfig, + table: FeatureView, + requested_feature: str, + query: List[float], + top_k: int, + ) -> List[Tuple[Optional[datetime], Optional[ValueProto], Optional[ValueProto]]]: + """ + Searches for the top-k nearest neighbors of the given document in the online document store. + + Args: + config: The config for the current feature store. + table: The feature view whose embeddings should be searched. + requested_feature: the requested document feature name. + query: The query embedding to search for. + top_k: The number of nearest neighbors to return. + + Returns: + A list of dictionaries, where each dictionary contains the document feature. 
+ """ + pass + def get_provider(config: RepoConfig) -> Provider: if "." not in config.provider: diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 1c9a958ce3..6abe30822f 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -23,9 +23,13 @@ import pytest from _pytest.nodes import Item +from feast.data_source import DataSource from feast.feature_store import FeatureStore # noqa: E402 from feast.wait import wait_retry_backoff # noqa: E402 -from tests.data.data_creator import create_basic_driver_dataset # noqa: E402 +from tests.data.data_creator import ( # noqa: E402 + create_basic_driver_dataset, + create_document_dataset, +) from tests.integration.feature_repos.integration_test_repo_config import ( # noqa: E402 IntegrationTestRepoConfig, ) @@ -405,3 +409,13 @@ def fake_ingest_data(): "created": [pd.Timestamp(datetime.utcnow()).round("ms")], } return pd.DataFrame(data) + + +@pytest.fixture +def fake_document_data(environment: Environment) -> Tuple[pd.DataFrame, DataSource]: + df = create_document_dataset() + data_source = environment.data_source_creator.create_data_source( + df, + environment.feature_store.project, + ) + return df, data_source diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 1fc66aee84..1be96f753a 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -78,3 +78,22 @@ def get_feature_values_for_dtype( return [[n, n] if n is not None else None for n in non_list_val] else: return non_list_val + + +def create_document_dataset() -> pd.DataFrame: + data = { + "item_id": [1, 2, 3], + "embedding_float": [[4.0, 5.0], [1.0, 2.0], [3.0, 4.0]], + "embedding_double": [[4.0, 5.0], [1.0, 2.0], [3.0, 4.0]], + "ts": [ + pd.Timestamp(datetime.utcnow()).round("ms"), + pd.Timestamp(datetime.utcnow()).round("ms"), + pd.Timestamp(datetime.utcnow()).round("ms"), + ], + "created_ts": [ + pd.Timestamp(datetime.utcnow()).round("ms"), 
+ pd.Timestamp(datetime.utcnow()).round("ms"), + pd.Timestamp(datetime.utcnow()).round("ms"), + ], + } + return pd.DataFrame(data) diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index ba256a3813..7ba4adb114 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -103,3 +103,13 @@ def retrieve_feature_service_logs( registry: BaseRegistry, ) -> RetrievalJob: return RetrievalJob() + + def retrieve_online_documents( + self, + config: RepoConfig, + table: FeatureView, + requested_feature: str, + query: List[float], + top_k: int, + ) -> List[Tuple[Optional[datetime], Optional[ValueProto], Optional[ValueProto]]]: + return [] diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py b/sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py new file mode 100644 index 0000000000..58e7af9c46 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py @@ -0,0 +1,68 @@ +from typing import Dict + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs +from testcontainers.postgres import PostgresContainer + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class PostgresOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str, **kwargs): + super().__init__(project_name) + self.container = PostgresContainer( + "postgres:16", + username="root", + password="test", + dbname="test", + ).with_exposed_ports(5432) + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + return { + "host": "localhost", + "type": "postgres", + "user": "root", + "password": "test", + "database": "test", + "port": self.container.get_exposed_port(5432), + } + + def teardown(self): + self.container.stop() + + +class PGVectorOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, 
project_name: str, **kwargs): + super().__init__(project_name) + self.container = ( + DockerContainer("pgvector/pgvector:pg16") + .with_env("POSTGRES_USER", "root") + .with_env("POSTGRES_PASSWORD", "test") + .with_env("POSTGRES_DB", "test") + .with_exposed_ports(5432) + ) + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = "database system is ready to accept connections" + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=10 + ) + command = "psql -h localhost -p 5432 -U root -d test -c 'CREATE EXTENSION IF NOT EXISTS vector;'" + self.container.exec(command) + return { + "host": "localhost", + "type": "postgres", + "user": "root", + "password": "test", + "database": "test", + "pgvector_enabled": True, + "vector_len": 2, + "port": self.container.get_exposed_port(5432), + } + + def teardown(self): + self.container.stop() diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 8218971315..3ae7be9e1e 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -25,9 +25,10 @@ Environment, construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import driver +from tests.integration.feature_repos.universal.entities import driver, item from tests.integration.feature_repos.universal.feature_views import ( create_driver_hourly_stats_feature_view, + create_item_embeddings_feature_view, driver_feature_view, ) from tests.utils.data_source_test_creator import prep_file_source @@ -785,3 +786,18 @@ def assert_feature_service_entity_mapping_correctness( entity_rows=entity_rows, full_feature_names=full_feature_names, ) + + +@pytest.mark.integration +@pytest.mark.universal_online_stores(only=["pgvector"]) +def test_retrieve_online_documents(environment, 
fake_document_data): + fs = environment.feature_store + df, data_source = fake_document_data + item_embeddings_feature_view = create_item_embeddings_feature_view(data_source) + fs.apply([item_embeddings_feature_view, item()]) + fs.write_to_online_store("item_embeddings", df) + + documents = fs.retrieve_online_documents( + feature="item_embeddings:embedding_float", query=[1.0, 2.0], top_k=2 + ).to_dict() + assert len(documents["embedding_float"]) == 2 diff --git a/setup.py b/setup.py index 9f1676524f..c1d872a99f 100644 --- a/setup.py +++ b/setup.py @@ -177,7 +177,7 @@ "pytest-mock==1.10.4", "pytest-env", "Sphinx>4.0.0,<7", - "testcontainers>=3.5,<4", + "testcontainers==4.3.3", "firebase-admin>=5.2.0,<6", "pre-commit<3.3.2", "assertpy==1.1", From a05cdbcd38d80ce1abfff7d93bef9df589dbd61c Mon Sep 17 00:00:00 2001 From: lokeshrangineni Date: Mon, 15 Apr 2024 03:35:42 -0400 Subject: [PATCH 117/122] fix: Trying to import pyspark lazily to avoid the dependency on the library (#4091) --- .../contrib/spark_offline_store/spark_source.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index 0809043a01..4eb020ebd3 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -4,8 +4,6 @@ from enum import Enum from typing import Any, Callable, Dict, Iterable, Optional, Tuple -from pyspark.sql import SparkSession - from feast import flags_helper from feast.data_source import DataSource from feast.errors import DataSourceNoNameException, DataSourceNotFoundException @@ -162,6 +160,13 @@ def get_table_column_names_and_types( def get_table_query_string(self) -> str: """Returns a string that can directly be used to reference this table in SQL""" + try: + from pyspark.sql 
import SparkSession + except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("spark", str(e)) + if self.table: # Backticks make sure that spark sql knows this a table reference. table = ".".join([f"`{x}`" for x in self.table.split(".")]) From 9ed0a09746aca0eb73c6e214f082e0e3887ff836 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Mon, 15 Apr 2024 11:38:25 +0400 Subject: [PATCH 118/122] feat: Make arrow primary interchange for offline ODFV execution (#4083) --- .../infra/offline_stores/offline_store.py | 64 ++++++------------- sdk/python/feast/on_demand_feature_view.py | 55 ++++++++++++++++ .../transformation/pandas_transformation.py | 14 ++++ .../transformation/python_transformation.py | 6 ++ .../substrait_transformation.py | 9 +++ .../offline_stores/test_offline_store.py | 2 +- 6 files changed, 104 insertions(+), 46 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 6c16ef2643..4851aecae2 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -76,36 +76,11 @@ def to_df( validation_reference (optional): The validation to apply against the retrieved dataframe. timeout (optional): The query timeout if applicable. """ - features_df = self._to_df_internal(timeout=timeout) - - if self.on_demand_feature_views: - # TODO(adchia): Fix requirement to specify dependent feature views in feature_refs - for odfv in self.on_demand_feature_views: - if odfv.mode not in {"pandas", "substrait"}: - raise Exception( - f'OnDemandFeatureView mode "{odfv.mode}" not supported for offline processing.' 
- ) - features_df = features_df.join( - odfv.get_transformed_features_df( - features_df, - self.full_feature_names, - ) - ) - - if validation_reference: - if not flags_helper.is_test(): - warnings.warn( - "Dataset validation is an experimental feature. " - "This API is unstable and it could and most probably will be changed in the future. " - "We do not guarantee that future changes will maintain backward compatibility.", - RuntimeWarning, - ) - - validation_result = validation_reference.profile.validate(features_df) - if not validation_result.is_success: - raise ValidationFailed(validation_result) - - return features_df + return ( + self.to_arrow(validation_reference=validation_reference, timeout=timeout) + .to_pandas() + .reset_index(drop=True) + ) def to_arrow( self, @@ -122,23 +97,20 @@ def to_arrow( validation_reference (optional): The validation to apply against the retrieved dataframe. timeout (optional): The query timeout if applicable. """ - if not self.on_demand_feature_views and not validation_reference: - return self._to_arrow_internal(timeout=timeout) - - features_df = self._to_df_internal(timeout=timeout) + features_table = self._to_arrow_internal(timeout=timeout) if self.on_demand_feature_views: for odfv in self.on_demand_feature_views: - if odfv.mode not in {"pandas", "substrait"}: - raise Exception( - f'OnDemandFeatureView mode "{odfv.mode}" not supported for offline processing.' 
- ) - features_df = features_df.join( - odfv.get_transformed_features_df( - features_df, - self.full_feature_names, - ) + transformed_arrow = odfv.transform_arrow( + features_table, self.full_feature_names ) + for col in transformed_arrow.column_names: + if col.startswith("__index"): + continue + features_table = features_table.append_column( + col, transformed_arrow[col] + ) + if validation_reference: if not flags_helper.is_test(): warnings.warn( @@ -148,11 +120,13 @@ def to_arrow( RuntimeWarning, ) - validation_result = validation_reference.profile.validate(features_df) + validation_result = validation_reference.profile.validate( + features_table.to_pandas() + ) if not validation_result.is_success: raise ValidationFailed(validation_result) - return pyarrow.Table.from_pandas(features_df) + return features_table def to_sql(self) -> str: """ diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 9d1c360cb3..cfb322fb2d 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -8,6 +8,7 @@ import dill import pandas as pd +import pyarrow from typeguard import typechecked from feast.base_feature_view import BaseFeatureView @@ -391,6 +392,60 @@ def get_request_data_schema(self) -> Dict[str, ValueType]: def _get_projected_feature_name(self, feature: str) -> str: return f"{self.projection.name_to_use()}__{feature}" + def transform_arrow( + self, + pa_table: pyarrow.Table, + full_feature_names: bool = False, + ) -> pyarrow.Table: + if not isinstance(pa_table, pyarrow.Table): + raise TypeError("transform_arrow only accepts pyarrow.Table") + columns_to_cleanup = [] + for source_fv_projection in self.source_feature_view_projections.values(): + for feature in source_fv_projection.features: + full_feature_ref = f"{source_fv_projection.name}__{feature.name}" + if full_feature_ref in pa_table.column_names: + # Make sure the partial feature name is always present + pa_table = 
pa_table.append_column( + feature.name, pa_table[full_feature_ref] + ) + # pa_table[feature.name] = pa_table[full_feature_ref] + columns_to_cleanup.append(feature.name) + elif feature.name in pa_table.column_names: + # Make sure the full feature name is always present + # pa_table[full_feature_ref] = pa_table[feature.name] + pa_table = pa_table.append_column( + full_feature_ref, pa_table[feature.name] + ) + columns_to_cleanup.append(full_feature_ref) + + df_with_transformed_features: pyarrow.Table = ( + self.feature_transformation.transform_arrow(pa_table) + ) + + # Work out whether the correct columns names are used. + rename_columns: Dict[str, str] = {} + for feature in self.features: + short_name = feature.name + long_name = self._get_projected_feature_name(feature.name) + if ( + short_name in df_with_transformed_features.column_names + and full_feature_names + ): + rename_columns[short_name] = long_name + elif not full_feature_names: + rename_columns[long_name] = short_name + + # Cleanup extra columns used for transformation + for col in columns_to_cleanup: + if col in df_with_transformed_features.column_names: + df_with_transformed_features = df_with_transformed_features.dtop(col) + return df_with_transformed_features.rename_columns( + [ + rename_columns.get(c, c) + for c in df_with_transformed_features.column_names + ] + ) + def get_transformed_features_df( self, df_with_features: pd.DataFrame, diff --git a/sdk/python/feast/transformation/pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py index d48055c694..28f3c22b9f 100644 --- a/sdk/python/feast/transformation/pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -3,6 +3,7 @@ import dill import pandas as pd +import pyarrow from feast.field import Field, from_value_type from feast.protos.feast.core.Transformation_pb2 import ( @@ -26,6 +27,19 @@ def __init__(self, udf: FunctionType, udf_string: str = ""): self.udf = udf self.udf_string = 
udf_string + def transform_arrow(self, pa_table: pyarrow.Table) -> pyarrow.Table: + if not isinstance(pa_table, pyarrow.Table): + raise TypeError( + f"pa_table should be type pyarrow.Table but got {type(pa_table).__name__}" + ) + output_df = self.udf.__call__(pa_table.to_pandas()) + output_df = pyarrow.Table.from_pandas(output_df) + if not isinstance(output_df, pyarrow.Table): + raise TypeError( + f"output_df should be type pyarrow.Table but got {type(output_df).__name__}" + ) + return output_df + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: if not isinstance(input_df, pd.DataFrame): raise TypeError( diff --git a/sdk/python/feast/transformation/python_transformation.py b/sdk/python/feast/transformation/python_transformation.py index 9f5fac6675..1245fc52ed 100644 --- a/sdk/python/feast/transformation/python_transformation.py +++ b/sdk/python/feast/transformation/python_transformation.py @@ -2,6 +2,7 @@ from typing import Any, Dict, List import dill +import pyarrow from feast.field import Field, from_value_type from feast.protos.feast.core.Transformation_pb2 import ( @@ -24,6 +25,11 @@ def __init__(self, udf: FunctionType, udf_string: str = ""): self.udf = udf self.udf_string = udf_string + def transform_arrow(self, pa_table: pyarrow.Table) -> pyarrow.Table: + raise Exception( + 'OnDemandFeatureView mode "python" not supported for offline processing.' 
+ ) + def transform(self, input_dict: Dict) -> Dict: if not isinstance(input_dict, Dict): raise TypeError( diff --git a/sdk/python/feast/transformation/substrait_transformation.py b/sdk/python/feast/transformation/substrait_transformation.py index 48b708ac70..a816f8118a 100644 --- a/sdk/python/feast/transformation/substrait_transformation.py +++ b/sdk/python/feast/transformation/substrait_transformation.py @@ -34,6 +34,15 @@ def table_provider(names, schema: pyarrow.Schema): ).read_all() return table.to_pandas() + def transform_arrow(self, pa_table: pyarrow.Table) -> pyarrow.Table: + def table_provider(names, schema: pyarrow.Schema): + return pa_table.select(schema.names) + + table: pyarrow.Table = pyarrow.substrait.run_query( + self.substrait_plan, table_provider=table_provider + ).read_all() + return table + def infer_features(self, random_input: Dict[str, List[Any]]) -> List[Field]: df = pd.DataFrame.from_dict(random_input) output_df: pd.DataFrame = self.transform(df) diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index 9d8c4a7ec1..0232a8d379 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -216,7 +216,7 @@ def test_to_sql(): @pytest.mark.parametrize("timeout", (None, 30)) def test_to_df_timeout(retrieval_job, timeout: Optional[int]): - with patch.object(retrieval_job, "_to_df_internal") as mock_to_df_internal: + with patch.object(retrieval_job, "_to_arrow_internal") as mock_to_df_internal: retrieval_job.to_df(timeout=timeout) mock_to_df_internal.assert_called_once_with(timeout=timeout) From 3afa78e454b5478b041f1182edcebace916ef67b Mon Sep 17 00:00:00 2001 From: locnt241 <73770977+ElliotNguyen68@users.noreply.github.com> Date: Mon, 15 Apr 2024 14:38:58 +0700 Subject: [PATCH 119/122] fix: Fix type mapping spark (#4071) --- sdk/python/feast/type_map.py | 8 +++++++- 1 
file changed, 7 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index ad3e273d37..e7fdf97120 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -752,7 +752,7 @@ def _non_empty_value(value: Any) -> bool: def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType: # TODO not all spark types are convertible - # Current non-convertible types: interval, map, struct, structfield, decimal, binary + # Current non-convertible types: interval, map, struct, structfield, binary type_map: Dict[str, ValueType] = { "null": ValueType.UNKNOWN, "byte": ValueType.BYTES, @@ -762,6 +762,7 @@ def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType: "bigint": ValueType.INT64, "long": ValueType.INT64, "double": ValueType.DOUBLE, + "decimal": ValueType.DOUBLE, "float": ValueType.FLOAT, "boolean": ValueType.BOOL, "timestamp": ValueType.UNIX_TIMESTAMP, @@ -770,10 +771,15 @@ def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType: "array": ValueType.INT32_LIST, "array": ValueType.INT64_LIST, "array": ValueType.DOUBLE_LIST, + "array": ValueType.DOUBLE_LIST, "array": ValueType.FLOAT_LIST, "array": ValueType.BOOL_LIST, "array": ValueType.UNIX_TIMESTAMP_LIST, } + if spark_type_as_str.startswith("decimal"): + spark_type_as_str = "decimal" + if spark_type_as_str.startswith("array Date: Mon, 15 Apr 2024 03:43:40 -0400 Subject: [PATCH 120/122] fix: Making the query_timeout variable as optional int because upstream is considered to be optional (#4092) --- sdk/python/feast/infra/contrib/spark_kafka_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index fc4a34f17b..e148000bc9 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -21,7 +21,7 @@ class 
SparkProcessorConfig(ProcessorConfig): spark_session: SparkSession processing_time: str - query_timeout: int + query_timeout: Optional[int] = None class SparkKafkaProcessor(StreamProcessor): From b5a701359543e9e0f4088db54beb939e57131faa Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Mon, 15 Apr 2024 03:44:16 -0400 Subject: [PATCH 121/122] fix: Disable the Feast Usage feature by default. (#4090) --- docs/reference/usage.md | 6 +++--- sdk/python/feast/constants.py | 6 +++--- sdk/python/feast/usage.py | 12 +++++++----- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/docs/reference/usage.md b/docs/reference/usage.md index d571675d7e..8c9c904661 100644 --- a/docs/reference/usage.md +++ b/docs/reference/usage.md @@ -2,11 +2,11 @@ ## How Feast SDK usage is measured -The Feast project logs anonymous usage statistics and errors in order to inform our planning. Several client methods are tracked, beginning in Feast 0.9. Users are assigned a UUID which is sent along with the name of the method, the Feast version, the OS \(using `sys.platform`\), and the current time. +The Feast project has a feature to log usage statistics and errors. Several client methods are tracked, beginning in Feast 0.9. Users are assigned a UUID which is sent along with the name of the method, the Feast version, the OS \(using `sys.platform`\), and the current time. The [source code](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/usage.py) is available here. -## How to disable usage logging +## How to enable the usage logging -Set the environment variable `FEAST_USAGE` to `False`. +Set the environment variable `FEAST_USAGE` to `True` (in String type) and config your endpoint by the variable `FEAST_USAGE_ENDPOINT`. 
diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py index e47da0ad6b..f986577117 100644 --- a/sdk/python/feast/constants.py +++ b/sdk/python/feast/constants.py @@ -29,11 +29,11 @@ # Environment variable for registry REGISTRY_ENV_NAME: str = "REGISTRY_BASE64" -# Environment variable for toggling usage +# Environment variable for toggling the Usage feature FEAST_USAGE = "FEAST_USAGE" -# Default value for FEAST_USAGE when environment variable is not set -DEFAULT_FEAST_USAGE_VALUE = "True" +# Environment variable for FEAST_USAGE_ENDPOINT +FEAST_USAGE_ENDPOINT = "FEAST_USAGE_ENDPOINT" # Environment variable for the path for overwriting universal test configs FULL_REPO_CONFIGS_MODULE_ENV_NAME: str = "FULL_REPO_CONFIGS_MODULE" diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py index c6919eed4c..faf734cf01 100644 --- a/sdk/python/feast/usage.py +++ b/sdk/python/feast/usage.py @@ -30,15 +30,17 @@ import requests from feast import flags_helper -from feast.constants import DEFAULT_FEAST_USAGE_VALUE, FEAST_USAGE +from feast.constants import FEAST_USAGE, FEAST_USAGE_ENDPOINT from feast.version import get_version -USAGE_ENDPOINT = "https://usage.feast.dev" - _logger = logging.getLogger(__name__) _executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) -_is_enabled = os.getenv(FEAST_USAGE, default=DEFAULT_FEAST_USAGE_VALUE) == "True" +_is_enabled = os.getenv(FEAST_USAGE, default="False") == "True" + +# Default usage endpoint value. +# Will raise an exception if the configured value is not working. 
+_usage_endpoint = os.getenv(FEAST_USAGE_ENDPOINT, default="") _constant_attributes = { "project_id": "", @@ -177,7 +179,7 @@ def _set_installation_id(): def _export(event: typing.Dict[str, typing.Any]): - _executor.submit(requests.post, USAGE_ENDPOINT, json=event, timeout=2) + _executor.submit(requests.post, _usage_endpoint, json=event, timeout=2) def _produce_event(ctx: UsageContext): From 474eb62e52a7a2389d4b76b052b4c59e82daf4ae Mon Sep 17 00:00:00 2001 From: feast-ci-bot Date: Tue, 16 Apr 2024 03:33:16 +0000 Subject: [PATCH 122/122] chore(release): release 0.36.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # [0.36.0](https://github.com/feast-dev/feast/compare/v0.35.0...v0.36.0) (2024-04-16) ### Bug Fixes * Add __eq__, __hash__ to SparkSource for correct comparison ([#4028](https://github.com/feast-dev/feast/issues/4028)) ([e703b40](https://github.com/feast-dev/feast/commit/e703b40582e676d4ec92551e79a444a9c0949f66)) * Add conn.commit() to Postgresonline_write_batch.online_write_batch ([#3904](https://github.com/feast-dev/feast/issues/3904)) ([7d75fc5](https://github.com/feast-dev/feast/commit/7d75fc525a7f2f46811d168ce71f91b5736ad788)) * Add missing __init__.py to embedded_go ([#4051](https://github.com/feast-dev/feast/issues/4051)) ([6bb4c73](https://github.com/feast-dev/feast/commit/6bb4c73b49934706002f9346c2260ab4261e4638)) * Add missing init files in infra utils ([#4067](https://github.com/feast-dev/feast/issues/4067)) ([54910a1](https://github.com/feast-dev/feast/commit/54910a16253c3f901d3bd5399bc2ba9703a7254d)) * Added registryPath parameter documentation in WebUI reference ([#3983](https://github.com/feast-dev/feast/issues/3983)) ([5e0af8f](https://github.com/feast-dev/feast/commit/5e0af8f52832daec34edd19cbad5e20ac3fd74d0)), closes [#3974](https://github.com/feast-dev/feast/issues/3974) [#3974](https://github.com/feast-dev/feast/issues/3974) * Adding missing init files in materialization modules 
([#4052](https://github.com/feast-dev/feast/issues/4052)) ([df05253](https://github.com/feast-dev/feast/commit/df0525355c32bbc40f890213edfa36512dd5bf55)) * Allow trancated timestamps when converting ([#3861](https://github.com/feast-dev/feast/issues/3861)) ([bdd7dfb](https://github.com/feast-dev/feast/commit/bdd7dfb6128dfc1f314a61a266da91c611ce7892)) * Azure blob storage support in Java feature server ([#2319](https://github.com/feast-dev/feast/issues/2319)) ([#4014](https://github.com/feast-dev/feast/issues/4014)) ([b9aabbd](https://github.com/feast-dev/feast/commit/b9aabbd35e27b26fb3af414da604062d6c8d17d0)) * Bugfix for grabbing historical data from Snowflake with array type features. ([#3964](https://github.com/feast-dev/feast/issues/3964)) ([1cc94f2](https://github.com/feast-dev/feast/commit/1cc94f2d23f88e0d9412b2fab8761abc81f5d35c)) * Bytewax materialization engine fails when loading feature_store.yaml ([#3912](https://github.com/feast-dev/feast/issues/3912)) ([987f0fd](https://github.com/feast-dev/feast/commit/987f0fdc99df1ef4507baff75e3df0e02bf42034)) * CI unittest warnings ([#4006](https://github.com/feast-dev/feast/issues/4006)) ([0441b8b](https://github.com/feast-dev/feast/commit/0441b8b9a7eae2eb478d12a8de911c1bd39ced37)) * Correct the returning class proto type of StreamFeatureView to StreamFeatureViewProto instead of FeatureViewProto. 
([#3843](https://github.com/feast-dev/feast/issues/3843)) ([86d6221](https://github.com/feast-dev/feast/commit/86d62215f2338ea9d48c6e723e907c82cbe5500b)) * Create index only if not exists during MySQL online store update ([#3905](https://github.com/feast-dev/feast/issues/3905)) ([2f99a61](https://github.com/feast-dev/feast/commit/2f99a617b6a5d8eae1e27c780bbfa94594f54441)) * Disable minio tests in workflows on master and nightly ([#4072](https://github.com/feast-dev/feast/issues/4072)) ([c06dda8](https://github.com/feast-dev/feast/commit/c06dda84a26c5df3e761a18adaa81f87b1bcc0de)) * Disable the Feast Usage feature by default. ([#4090](https://github.com/feast-dev/feast/issues/4090)) ([b5a7013](https://github.com/feast-dev/feast/commit/b5a701359543e9e0f4088db54beb939e57131faa)) * Dump repo_config by alias ([#4063](https://github.com/feast-dev/feast/issues/4063)) ([e4bef67](https://github.com/feast-dev/feast/commit/e4bef6769265a9b5d87486e34ac00f022ca9ce28)) * Extend SQL registry config with a sqlalchemy_config_kwargs key ([#3997](https://github.com/feast-dev/feast/issues/3997)) ([21931d5](https://github.com/feast-dev/feast/commit/21931d59f8a2f8b69383de0dd371a780149ccda8)) * Feature Server image startup in OpenShift clusters ([#4096](https://github.com/feast-dev/feast/issues/4096)) ([9efb243](https://github.com/feast-dev/feast/commit/9efb243c548b075ca8288e04b09b84a9fa49dc7c)) * Fix copy method for StreamFeatureView ([#3951](https://github.com/feast-dev/feast/issues/3951)) ([cf06704](https://github.com/feast-dev/feast/commit/cf06704bd58c77931679f1c0c7e44de7042f931f)) * Fix for materializing entityless feature views in Snowflake ([#3961](https://github.com/feast-dev/feast/issues/3961)) ([1e64c77](https://github.com/feast-dev/feast/commit/1e64c77e1e146f952f450db9370e2da5c85a8500)) * Fix type mapping spark ([#4071](https://github.com/feast-dev/feast/issues/4071)) ([3afa78e](https://github.com/feast-dev/feast/commit/3afa78e454b5478b041f1182edcebace916ef67b)) * Fix typo as 
the cli does not support shortcut-f option. ([#3954](https://github.com/feast-dev/feast/issues/3954)) ([dd79dbb](https://github.com/feast-dev/feast/commit/dd79dbbac90caaf0617a5046c84a2618e532980b)) * Get container host addresses from testcontainers ([#3946](https://github.com/feast-dev/feast/issues/3946)) ([2cf1a0f](https://github.com/feast-dev/feast/commit/2cf1a0fa9efbceca2e79c5e375796696e248e3d9)) * Handle ComplexFeastType to None comparison ([#3876](https://github.com/feast-dev/feast/issues/3876)) ([fa8492d](https://github.com/feast-dev/feast/commit/fa8492dfe7f38ab493a8d35a412ec9334a0ff6b9)) * Hashlib md5 errors in FIPS for python 3.9+ ([#4019](https://github.com/feast-dev/feast/issues/4019)) ([6d9156b](https://github.com/feast-dev/feast/commit/6d9156b3d6372d654048ea2bfb7eec3f3908d038)) * Making the query_timeout variable as optional int because upstream is considered to be optional ([#4092](https://github.com/feast-dev/feast/issues/4092)) ([fd5b620](https://github.com/feast-dev/feast/commit/fd5b620b2c56c56286a5899b271da426c1a4ef67)) * Move gRPC dependencies to an extra ([#3900](https://github.com/feast-dev/feast/issues/3900)) ([f93c5fd](https://github.com/feast-dev/feast/commit/f93c5fd4b8bd0031942c4f6ba4e84ebc54be8522)) * Prevent spamming pull busybox from dockerhub ([#3923](https://github.com/feast-dev/feast/issues/3923)) ([7153cad](https://github.com/feast-dev/feast/commit/7153cad6082edfded96999c49ee1bdc9329e11c3)) * Quickstart notebook example ([#3976](https://github.com/feast-dev/feast/issues/3976)) ([b023aa5](https://github.com/feast-dev/feast/commit/b023aa5817bffe235f460c5df879141bb5945edb)) * Raise error when not able read of file source spark source ([#4005](https://github.com/feast-dev/feast/issues/4005)) ([34cabfb](https://github.com/feast-dev/feast/commit/34cabfb29a2692180dc6b6dda8bba9062beca4d2)) * remove not use input parameter in spark source ([#3980](https://github.com/feast-dev/feast/issues/3980)) 
([7c90882](https://github.com/feast-dev/feast/commit/7c908822f8d9f5e32ab17d96e6b5dd79e5b59b3e)) * Remove parentheses in pull_latest_from_table_or_query ([#4026](https://github.com/feast-dev/feast/issues/4026)) ([dc4671e](https://github.com/feast-dev/feast/commit/dc4671ed7e28b4157112a81ee0a70925d02db8e8)) * Remove proto-plus imports ([#4044](https://github.com/feast-dev/feast/issues/4044)) ([ad8f572](https://github.com/feast-dev/feast/commit/ad8f5721af6d8ad8b7539b91e0616ebf6e47f47b)) * Remove unnecessary dependency on mysqlclient ([#3925](https://github.com/feast-dev/feast/issues/3925)) ([f494f02](https://github.com/feast-dev/feast/commit/f494f02e1254b91b56b0b69f4a15edafe8d7291a)) * Restore label check for all actions using pull_request_target ([#3978](https://github.com/feast-dev/feast/issues/3978)) ([591ba4e](https://github.com/feast-dev/feast/commit/591ba4e39842b5fbb49db32be4fce28e6d520d93)) * Revert mypy config ([#3952](https://github.com/feast-dev/feast/issues/3952)) ([6b8e96c](https://github.com/feast-dev/feast/commit/6b8e96c982a50587a13216666085fc61494cdfc9)) * Rewrite Spark materialization engine to use mapInPandas ([#3936](https://github.com/feast-dev/feast/issues/3936)) ([dbb59ba](https://github.com/feast-dev/feast/commit/dbb59ba0932e5962b34b14e7218a1ddae86a9686)) * Run feature server w/o gunicorn on windows ([#4024](https://github.com/feast-dev/feast/issues/4024)) ([584e9b1](https://github.com/feast-dev/feast/commit/584e9b1be9452158d9104133a24ff29d3976f9ed)) * SqlRegistry _apply_object update statement ([#4042](https://github.com/feast-dev/feast/issues/4042)) ([ef62def](https://github.com/feast-dev/feast/commit/ef62defbd80172ba3c536c413388234707278be1)) * Substrait ODFVs for online ([#4064](https://github.com/feast-dev/feast/issues/4064)) ([26391b0](https://github.com/feast-dev/feast/commit/26391b07605794bcb0eb6cdec6d59bd94720bba6)) * Swap security label check on the PR title validation job to explicit permissions instead 
([#3987](https://github.com/feast-dev/feast/issues/3987)) ([f604af9](https://github.com/feast-dev/feast/commit/f604af9ebf56ebd88b4e6ef541fdc20de2cc5b8c)) * Transformation server doesn't generate files from proto ([#3902](https://github.com/feast-dev/feast/issues/3902)) ([d3a2a45](https://github.com/feast-dev/feast/commit/d3a2a45d9bc2b690a7aa784ec7b0411e91244dab)) * Trino as an OfflineStore Access Denied when BasicAuthenticaion ([#3898](https://github.com/feast-dev/feast/issues/3898)) ([49d2988](https://github.com/feast-dev/feast/commit/49d2988a562c66b3949cf2368fe44ed41e767eab)) * Trying to import pyspark lazily to avoid the dependency on the library ([#4091](https://github.com/feast-dev/feast/issues/4091)) ([a05cdbc](https://github.com/feast-dev/feast/commit/a05cdbcd38d80ce1abfff7d93bef9df589dbd61c)) * Typo Correction in Feast UI Readme ([#3939](https://github.com/feast-dev/feast/issues/3939)) ([c16e5af](https://github.com/feast-dev/feast/commit/c16e5afcc5273b0c26b79dd4e233a28618ac490a)) * Update actions/setup-python from v3 to v4 ([#4003](https://github.com/feast-dev/feast/issues/4003)) ([ee4c4f1](https://github.com/feast-dev/feast/commit/ee4c4f1ca486facc14e13ad0dbe7c9cc7c82d832)) * Update typeguard version to >=4.0.0 ([#3837](https://github.com/feast-dev/feast/issues/3837)) ([dd96150](https://github.com/feast-dev/feast/commit/dd96150e2a5829401f793a51da4b3594677e570d)) * Upgrade sqlalchemy from 1.x to 2.x regarding PVE-2022-51668. ([#4065](https://github.com/feast-dev/feast/issues/4065)) ([ec4c15c](https://github.com/feast-dev/feast/commit/ec4c15c0104fa8f4cebdbf29f9e067baab07b09b)) * Use CopyFrom() instead of __deepycopy__() for creating a copy of protobuf object. 
([#3999](https://github.com/feast-dev/feast/issues/3999)) ([5561b30](https://github.com/feast-dev/feast/commit/5561b306d8c7b43851f5f411e1c4f4f34d99933f)) * Using version args to install the correct feast version ([#3953](https://github.com/feast-dev/feast/issues/3953)) ([b83a702](https://github.com/feast-dev/feast/commit/b83a70227c6afe7258328ff5847a26b526d0b5df)) * Verify the existence of Registry tables in snowflake before calling CREATE sql command. Allow read-only user to call feast apply. ([#3851](https://github.com/feast-dev/feast/issues/3851)) ([9a3590e](https://github.com/feast-dev/feast/commit/9a3590ea771ca3c3224f5e1a833453144e54284e)) ### Features * Add duckdb offline store ([#3981](https://github.com/feast-dev/feast/issues/3981)) ([161547b](https://github.com/feast-dev/feast/commit/161547b167c7a9b2d53517d498acbe50d9298a40)) * Add Entity df in format of a Spark Dataframe instead of just pd.DataFrame or string for SparkOfflineStore ([#3988](https://github.com/feast-dev/feast/issues/3988)) ([43b2c28](https://github.com/feast-dev/feast/commit/43b2c287705c2a3e882517524229f155c9ce0a01)) * Add gRPC Registry Server ([#3924](https://github.com/feast-dev/feast/issues/3924)) ([373e624](https://github.com/feast-dev/feast/commit/373e624abb8779b8a60d30aa08d25414d987bb1b)) * Add local tests for s3 registry using minio ([#4029](https://github.com/feast-dev/feast/issues/4029)) ([d82d1ec](https://github.com/feast-dev/feast/commit/d82d1ecb534ab35b901c36e920666196eae0ac79)) * Add python bytes to array type conversion support proto ([#3874](https://github.com/feast-dev/feast/issues/3874)) ([8688acd](https://github.com/feast-dev/feast/commit/8688acd1731aa04b041090c7b1c049bfba1717ed)) * Add python client for remote registry server ([#3941](https://github.com/feast-dev/feast/issues/3941)) ([42a7b81](https://github.com/feast-dev/feast/commit/42a7b8170d6dc994055c67989046d11c238af40f)) * Add Substrait-based ODFV transformation 
([#3969](https://github.com/feast-dev/feast/issues/3969)) ([9e58bd4](https://github.com/feast-dev/feast/commit/9e58bd463f7ca2b4982708cb1e1250f587ecfb68)) * Add support for arrays in snowflake ([#3769](https://github.com/feast-dev/feast/issues/3769)) ([8d6bec8](https://github.com/feast-dev/feast/commit/8d6bec8fc47986c84f366ce3edfe7d03fa6b2e9f)) * Added delete_table to redis online store ([#3857](https://github.com/feast-dev/feast/issues/3857)) ([03dae13](https://github.com/feast-dev/feast/commit/03dae13aa60c072b171c7f21d4e795eaaad18e55)) * Adding support for Native Python feature transformations for ODFVs ([#4045](https://github.com/feast-dev/feast/issues/4045)) ([73bc853](https://github.com/feast-dev/feast/commit/73bc85351a9202d3db93907e8206d68123ee5baa)) * Bumping requirements ([#4079](https://github.com/feast-dev/feast/issues/4079)) ([1943056](https://github.com/feast-dev/feast/commit/194305631bbb6cca251dbb46df5b5575ffb2391b)) * Decouple transformation types from ODFVs ([#3949](https://github.com/feast-dev/feast/issues/3949)) ([0a9fae8](https://github.com/feast-dev/feast/commit/0a9fae8fd42e7348365ef902038f3f71f977ef3e)) * Dropping Python 3.8 from local integration tests and integration tests ([#3994](https://github.com/feast-dev/feast/issues/3994)) ([817995c](https://github.com/feast-dev/feast/commit/817995c12588cc35c53d1ad487efaaf53da287be)) * Dropping python 3.8 requirements files from the project. 
([#4021](https://github.com/feast-dev/feast/issues/4021)) ([f09c612](https://github.com/feast-dev/feast/commit/f09c612d046dfa56e9c616ff68c05823ce0f3bb6)) * Dropping the support for python 3.8 version from feast ([#4010](https://github.com/feast-dev/feast/issues/4010)) ([a0f7472](https://github.com/feast-dev/feast/commit/a0f7472f200300f3a45aa404922dd67bb4ad237f)) * Dropping unit tests for Python 3.8 ([#3989](https://github.com/feast-dev/feast/issues/3989)) ([60f24f9](https://github.com/feast-dev/feast/commit/60f24f9ed16a216acb0f3642892dea73690ca29f)) * Enable Arrow-based columnar data transfers ([#3996](https://github.com/feast-dev/feast/issues/3996)) ([d8d7567](https://github.com/feast-dev/feast/commit/d8d75676cbaf565b6a6a097f33c49f56b852dcd7)) * Enable Vector database and retrieve_online_documents API ([#4061](https://github.com/feast-dev/feast/issues/4061)) ([ec19036](https://github.com/feast-dev/feast/commit/ec19036fcc4c77084a2dd5aae5576f8f43393eba)) * Kubernetes materialization engine written based on bytewax ([#4087](https://github.com/feast-dev/feast/issues/4087)) ([7617bdb](https://github.com/feast-dev/feast/commit/7617bdb7f4222edb69893c37621bd87b940b3227)) * Lint with ruff ([#4043](https://github.com/feast-dev/feast/issues/4043)) ([7f1557b](https://github.com/feast-dev/feast/commit/7f1557b348b7935e3586c90c8dec15fdf6cd8665)) * Make arrow primary interchange for offline ODFV execution ([#4083](https://github.com/feast-dev/feast/issues/4083)) ([9ed0a09](https://github.com/feast-dev/feast/commit/9ed0a09746aca0eb73c6e214f082e0e3887ff836)) * Pandas v2 compatibility ([#3957](https://github.com/feast-dev/feast/issues/3957)) ([64459ad](https://github.com/feast-dev/feast/commit/64459ad1b5ed4a782b7ce87fcec3012e00408c74)) * Pull duckdb from contribs, add to CI ([#4059](https://github.com/feast-dev/feast/issues/4059)) ([318a2b8](https://github.com/feast-dev/feast/commit/318a2b8bfc94f10c81206071fcb1d41f19683288)) * Refactor ODFV schema inference 
([#4076](https://github.com/feast-dev/feast/issues/4076)) ([c50a9ff](https://github.com/feast-dev/feast/commit/c50a9ff783fa400542422990ff835da930bcb6bf)) * Refactor registry caching logic into a separate class ([#3943](https://github.com/feast-dev/feast/issues/3943)) ([924f944](https://github.com/feast-dev/feast/commit/924f9441107b8e36a3d5c6f8b16ed24f9a03b867)) * Rename OnDemandTransformations to Transformations ([#4038](https://github.com/feast-dev/feast/issues/4038)) ([9b98eaf](https://github.com/feast-dev/feast/commit/9b98eafccbf39b41186bfb3ebd36af20d57bd509)) * Revert updating dependencies so that feast can be run on 3.11. ([#3968](https://github.com/feast-dev/feast/issues/3968)) ([d3c68fb](https://github.com/feast-dev/feast/commit/d3c68fb8646b29032cb67b8c8e6a8c0aa7a821c7)), closes [#3958](https://github.com/feast-dev/feast/issues/3958) * Rewrite ibis point-in-time-join w/o feast abstractions ([#4023](https://github.com/feast-dev/feast/issues/4023)) ([3980e0c](https://github.com/feast-dev/feast/commit/3980e0c9a762a6ec3bcee5a0e9cdf532994bb1c9)) * Support s3gov schema by snowflake offline store during materialization ([#3891](https://github.com/feast-dev/feast/issues/3891)) ([ea8ad17](https://github.com/feast-dev/feast/commit/ea8ad1731a5ebe798b11181fc0027f7cac0e1526)) * Update odfv test ([#4054](https://github.com/feast-dev/feast/issues/4054)) ([afd52b8](https://github.com/feast-dev/feast/commit/afd52b8803d7660a90f382d2c1ad7705608c861b)) * Update pyproject.toml to use Python 3.9 as default ([#4011](https://github.com/feast-dev/feast/issues/4011)) ([277b891](https://github.com/feast-dev/feast/commit/277b891ffa1193914b123672010e588573dcaa98)) * Update the Pydantic from v1 to v2 ([#3948](https://github.com/feast-dev/feast/issues/3948)) ([ec11a7c](https://github.com/feast-dev/feast/commit/ec11a7cb8d56d8e2e5cda07e06b4c98dcc9d2ba3)) * Updating dependencies so that feast can be run on 3.11. 
([#3958](https://github.com/feast-dev/feast/issues/3958)) ([59639db](https://github.com/feast-dev/feast/commit/59639dbb0272aacd2201cb5f65b01445013db6e6)) * Updating protos to separate transformation ([#4018](https://github.com/feast-dev/feast/issues/4018)) ([c58ef74](https://github.com/feast-dev/feast/commit/c58ef74c18554d823f7957bf602184c744bb7ed7)) ### Reverts * Reverting bumping requirements ([#4081](https://github.com/feast-dev/feast/issues/4081)) ([1ba65b4](https://github.com/feast-dev/feast/commit/1ba65b4e13a2af3e9cea879d1c1e48891a0f0610)), closes [#4079](https://github.com/feast-dev/feast/issues/4079) * Verify the existence of Registry tables in snowflake… ([#3907](https://github.com/feast-dev/feast/issues/3907)) ([c0d358a](https://github.com/feast-dev/feast/commit/c0d358a49d5f576bb9f1017d1ee0db2d6cd5f1a5)), closes [#3851](https://github.com/feast-dev/feast/issues/3851) --- CHANGELOG.md | 101 ++++++++++++++++++ infra/charts/feast-feature-server/Chart.yaml | 2 +- infra/charts/feast-feature-server/README.md | 4 +- infra/charts/feast-feature-server/values.yaml | 2 +- infra/charts/feast/Chart.yaml | 2 +- infra/charts/feast/README.md | 6 +- .../feast/charts/feature-server/Chart.yaml | 4 +- .../feast/charts/feature-server/README.md | 6 +- .../feast/charts/feature-server/values.yaml | 2 +- .../charts/transformation-service/Chart.yaml | 4 +- .../charts/transformation-service/README.md | 6 +- .../charts/transformation-service/values.yaml | 2 +- infra/charts/feast/requirements.yaml | 4 +- java/pom.xml | 2 +- sdk/python/feast/ui/package.json | 2 +- sdk/python/feast/ui/yarn.lock | 8 +- ui/package.json | 2 +- 17 files changed, 130 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 26b8baa963..f09ec9e2b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,106 @@ # Changelog +# [0.36.0](https://github.com/feast-dev/feast/compare/v0.35.0...v0.36.0) (2024-04-16) + + +### Bug Fixes + +* Add __eq__, __hash__ to SparkSource for correct comparison 
([#4028](https://github.com/feast-dev/feast/issues/4028)) ([e703b40](https://github.com/feast-dev/feast/commit/e703b40582e676d4ec92551e79a444a9c0949f66)) +* Add conn.commit() to Postgresonline_write_batch.online_write_batch ([#3904](https://github.com/feast-dev/feast/issues/3904)) ([7d75fc5](https://github.com/feast-dev/feast/commit/7d75fc525a7f2f46811d168ce71f91b5736ad788)) +* Add missing __init__.py to embedded_go ([#4051](https://github.com/feast-dev/feast/issues/4051)) ([6bb4c73](https://github.com/feast-dev/feast/commit/6bb4c73b49934706002f9346c2260ab4261e4638)) +* Add missing init files in infra utils ([#4067](https://github.com/feast-dev/feast/issues/4067)) ([54910a1](https://github.com/feast-dev/feast/commit/54910a16253c3f901d3bd5399bc2ba9703a7254d)) +* Added registryPath parameter documentation in WebUI reference ([#3983](https://github.com/feast-dev/feast/issues/3983)) ([5e0af8f](https://github.com/feast-dev/feast/commit/5e0af8f52832daec34edd19cbad5e20ac3fd74d0)), closes [#3974](https://github.com/feast-dev/feast/issues/3974) [#3974](https://github.com/feast-dev/feast/issues/3974) +* Adding missing init files in materialization modules ([#4052](https://github.com/feast-dev/feast/issues/4052)) ([df05253](https://github.com/feast-dev/feast/commit/df0525355c32bbc40f890213edfa36512dd5bf55)) +* Allow truncated timestamps when converting ([#3861](https://github.com/feast-dev/feast/issues/3861)) ([bdd7dfb](https://github.com/feast-dev/feast/commit/bdd7dfb6128dfc1f314a61a266da91c611ce7892)) +* Azure blob storage support in Java feature server ([#2319](https://github.com/feast-dev/feast/issues/2319)) ([#4014](https://github.com/feast-dev/feast/issues/4014)) ([b9aabbd](https://github.com/feast-dev/feast/commit/b9aabbd35e27b26fb3af414da604062d6c8d17d0)) +* Bugfix for grabbing historical data from Snowflake with array type features. 
([#3964](https://github.com/feast-dev/feast/issues/3964)) ([1cc94f2](https://github.com/feast-dev/feast/commit/1cc94f2d23f88e0d9412b2fab8761abc81f5d35c)) +* Bytewax materialization engine fails when loading feature_store.yaml ([#3912](https://github.com/feast-dev/feast/issues/3912)) ([987f0fd](https://github.com/feast-dev/feast/commit/987f0fdc99df1ef4507baff75e3df0e02bf42034)) +* CI unittest warnings ([#4006](https://github.com/feast-dev/feast/issues/4006)) ([0441b8b](https://github.com/feast-dev/feast/commit/0441b8b9a7eae2eb478d12a8de911c1bd39ced37)) +* Correct the returning class proto type of StreamFeatureView to StreamFeatureViewProto instead of FeatureViewProto. ([#3843](https://github.com/feast-dev/feast/issues/3843)) ([86d6221](https://github.com/feast-dev/feast/commit/86d62215f2338ea9d48c6e723e907c82cbe5500b)) +* Create index only if not exists during MySQL online store update ([#3905](https://github.com/feast-dev/feast/issues/3905)) ([2f99a61](https://github.com/feast-dev/feast/commit/2f99a617b6a5d8eae1e27c780bbfa94594f54441)) +* Disable minio tests in workflows on master and nightly ([#4072](https://github.com/feast-dev/feast/issues/4072)) ([c06dda8](https://github.com/feast-dev/feast/commit/c06dda84a26c5df3e761a18adaa81f87b1bcc0de)) +* Disable the Feast Usage feature by default. 
([#4090](https://github.com/feast-dev/feast/issues/4090)) ([b5a7013](https://github.com/feast-dev/feast/commit/b5a701359543e9e0f4088db54beb939e57131faa)) +* Dump repo_config by alias ([#4063](https://github.com/feast-dev/feast/issues/4063)) ([e4bef67](https://github.com/feast-dev/feast/commit/e4bef6769265a9b5d87486e34ac00f022ca9ce28)) +* Extend SQL registry config with a sqlalchemy_config_kwargs key ([#3997](https://github.com/feast-dev/feast/issues/3997)) ([21931d5](https://github.com/feast-dev/feast/commit/21931d59f8a2f8b69383de0dd371a780149ccda8)) +* Feature Server image startup in OpenShift clusters ([#4096](https://github.com/feast-dev/feast/issues/4096)) ([9efb243](https://github.com/feast-dev/feast/commit/9efb243c548b075ca8288e04b09b84a9fa49dc7c)) +* Fix copy method for StreamFeatureView ([#3951](https://github.com/feast-dev/feast/issues/3951)) ([cf06704](https://github.com/feast-dev/feast/commit/cf06704bd58c77931679f1c0c7e44de7042f931f)) +* Fix for materializing entityless feature views in Snowflake ([#3961](https://github.com/feast-dev/feast/issues/3961)) ([1e64c77](https://github.com/feast-dev/feast/commit/1e64c77e1e146f952f450db9370e2da5c85a8500)) +* Fix type mapping spark ([#4071](https://github.com/feast-dev/feast/issues/4071)) ([3afa78e](https://github.com/feast-dev/feast/commit/3afa78e454b5478b041f1182edcebace916ef67b)) +* Fix typo as the cli does not support shortcut-f option. 
([#3954](https://github.com/feast-dev/feast/issues/3954)) ([dd79dbb](https://github.com/feast-dev/feast/commit/dd79dbbac90caaf0617a5046c84a2618e532980b)) +* Get container host addresses from testcontainers ([#3946](https://github.com/feast-dev/feast/issues/3946)) ([2cf1a0f](https://github.com/feast-dev/feast/commit/2cf1a0fa9efbceca2e79c5e375796696e248e3d9)) +* Handle ComplexFeastType to None comparison ([#3876](https://github.com/feast-dev/feast/issues/3876)) ([fa8492d](https://github.com/feast-dev/feast/commit/fa8492dfe7f38ab493a8d35a412ec9334a0ff6b9)) +* Hashlib md5 errors in FIPS for python 3.9+ ([#4019](https://github.com/feast-dev/feast/issues/4019)) ([6d9156b](https://github.com/feast-dev/feast/commit/6d9156b3d6372d654048ea2bfb7eec3f3908d038)) +* Making the query_timeout variable as optional int because upstream is considered to be optional ([#4092](https://github.com/feast-dev/feast/issues/4092)) ([fd5b620](https://github.com/feast-dev/feast/commit/fd5b620b2c56c56286a5899b271da426c1a4ef67)) +* Move gRPC dependencies to an extra ([#3900](https://github.com/feast-dev/feast/issues/3900)) ([f93c5fd](https://github.com/feast-dev/feast/commit/f93c5fd4b8bd0031942c4f6ba4e84ebc54be8522)) +* Prevent spamming pull busybox from dockerhub ([#3923](https://github.com/feast-dev/feast/issues/3923)) ([7153cad](https://github.com/feast-dev/feast/commit/7153cad6082edfded96999c49ee1bdc9329e11c3)) +* Quickstart notebook example ([#3976](https://github.com/feast-dev/feast/issues/3976)) ([b023aa5](https://github.com/feast-dev/feast/commit/b023aa5817bffe235f460c5df879141bb5945edb)) +* Raise error when not able to read file source in spark source ([#4005](https://github.com/feast-dev/feast/issues/4005)) ([34cabfb](https://github.com/feast-dev/feast/commit/34cabfb29a2692180dc6b6dda8bba9062beca4d2)) +* Remove unused input parameter in spark source ([#3980](https://github.com/feast-dev/feast/issues/3980)) 
([7c90882](https://github.com/feast-dev/feast/commit/7c908822f8d9f5e32ab17d96e6b5dd79e5b59b3e)) +* Remove parentheses in pull_latest_from_table_or_query ([#4026](https://github.com/feast-dev/feast/issues/4026)) ([dc4671e](https://github.com/feast-dev/feast/commit/dc4671ed7e28b4157112a81ee0a70925d02db8e8)) +* Remove proto-plus imports ([#4044](https://github.com/feast-dev/feast/issues/4044)) ([ad8f572](https://github.com/feast-dev/feast/commit/ad8f5721af6d8ad8b7539b91e0616ebf6e47f47b)) +* Remove unnecessary dependency on mysqlclient ([#3925](https://github.com/feast-dev/feast/issues/3925)) ([f494f02](https://github.com/feast-dev/feast/commit/f494f02e1254b91b56b0b69f4a15edafe8d7291a)) +* Restore label check for all actions using pull_request_target ([#3978](https://github.com/feast-dev/feast/issues/3978)) ([591ba4e](https://github.com/feast-dev/feast/commit/591ba4e39842b5fbb49db32be4fce28e6d520d93)) +* Revert mypy config ([#3952](https://github.com/feast-dev/feast/issues/3952)) ([6b8e96c](https://github.com/feast-dev/feast/commit/6b8e96c982a50587a13216666085fc61494cdfc9)) +* Rewrite Spark materialization engine to use mapInPandas ([#3936](https://github.com/feast-dev/feast/issues/3936)) ([dbb59ba](https://github.com/feast-dev/feast/commit/dbb59ba0932e5962b34b14e7218a1ddae86a9686)) +* Run feature server w/o gunicorn on windows ([#4024](https://github.com/feast-dev/feast/issues/4024)) ([584e9b1](https://github.com/feast-dev/feast/commit/584e9b1be9452158d9104133a24ff29d3976f9ed)) +* SqlRegistry _apply_object update statement ([#4042](https://github.com/feast-dev/feast/issues/4042)) ([ef62def](https://github.com/feast-dev/feast/commit/ef62defbd80172ba3c536c413388234707278be1)) +* Substrait ODFVs for online ([#4064](https://github.com/feast-dev/feast/issues/4064)) ([26391b0](https://github.com/feast-dev/feast/commit/26391b07605794bcb0eb6cdec6d59bd94720bba6)) +* Swap security label check on the PR title validation job to explicit permissions instead 
([#3987](https://github.com/feast-dev/feast/issues/3987)) ([f604af9](https://github.com/feast-dev/feast/commit/f604af9ebf56ebd88b4e6ef541fdc20de2cc5b8c)) +* Transformation server doesn't generate files from proto ([#3902](https://github.com/feast-dev/feast/issues/3902)) ([d3a2a45](https://github.com/feast-dev/feast/commit/d3a2a45d9bc2b690a7aa784ec7b0411e91244dab)) +* Trino as an OfflineStore Access Denied when BasicAuthentication ([#3898](https://github.com/feast-dev/feast/issues/3898)) ([49d2988](https://github.com/feast-dev/feast/commit/49d2988a562c66b3949cf2368fe44ed41e767eab)) +* Trying to import pyspark lazily to avoid the dependency on the library ([#4091](https://github.com/feast-dev/feast/issues/4091)) ([a05cdbc](https://github.com/feast-dev/feast/commit/a05cdbcd38d80ce1abfff7d93bef9df589dbd61c)) +* Typo Correction in Feast UI Readme ([#3939](https://github.com/feast-dev/feast/issues/3939)) ([c16e5af](https://github.com/feast-dev/feast/commit/c16e5afcc5273b0c26b79dd4e233a28618ac490a)) +* Update actions/setup-python from v3 to v4 ([#4003](https://github.com/feast-dev/feast/issues/4003)) ([ee4c4f1](https://github.com/feast-dev/feast/commit/ee4c4f1ca486facc14e13ad0dbe7c9cc7c82d832)) +* Update typeguard version to >=4.0.0 ([#3837](https://github.com/feast-dev/feast/issues/3837)) ([dd96150](https://github.com/feast-dev/feast/commit/dd96150e2a5829401f793a51da4b3594677e570d)) +* Upgrade sqlalchemy from 1.x to 2.x regarding PVE-2022-51668. ([#4065](https://github.com/feast-dev/feast/issues/4065)) ([ec4c15c](https://github.com/feast-dev/feast/commit/ec4c15c0104fa8f4cebdbf29f9e067baab07b09b)) +* Use CopyFrom() instead of __deepcopy__() for creating a copy of protobuf object. 
([#3999](https://github.com/feast-dev/feast/issues/3999)) ([5561b30](https://github.com/feast-dev/feast/commit/5561b306d8c7b43851f5f411e1c4f4f34d99933f)) +* Using version args to install the correct feast version ([#3953](https://github.com/feast-dev/feast/issues/3953)) ([b83a702](https://github.com/feast-dev/feast/commit/b83a70227c6afe7258328ff5847a26b526d0b5df)) +* Verify the existence of Registry tables in snowflake before calling CREATE sql command. Allow read-only user to call feast apply. ([#3851](https://github.com/feast-dev/feast/issues/3851)) ([9a3590e](https://github.com/feast-dev/feast/commit/9a3590ea771ca3c3224f5e1a833453144e54284e)) + + +### Features + +* Add duckdb offline store ([#3981](https://github.com/feast-dev/feast/issues/3981)) ([161547b](https://github.com/feast-dev/feast/commit/161547b167c7a9b2d53517d498acbe50d9298a40)) +* Add Entity df in format of a Spark Dataframe instead of just pd.DataFrame or string for SparkOfflineStore ([#3988](https://github.com/feast-dev/feast/issues/3988)) ([43b2c28](https://github.com/feast-dev/feast/commit/43b2c287705c2a3e882517524229f155c9ce0a01)) +* Add gRPC Registry Server ([#3924](https://github.com/feast-dev/feast/issues/3924)) ([373e624](https://github.com/feast-dev/feast/commit/373e624abb8779b8a60d30aa08d25414d987bb1b)) +* Add local tests for s3 registry using minio ([#4029](https://github.com/feast-dev/feast/issues/4029)) ([d82d1ec](https://github.com/feast-dev/feast/commit/d82d1ecb534ab35b901c36e920666196eae0ac79)) +* Add python bytes to array type conversion support proto ([#3874](https://github.com/feast-dev/feast/issues/3874)) ([8688acd](https://github.com/feast-dev/feast/commit/8688acd1731aa04b041090c7b1c049bfba1717ed)) +* Add python client for remote registry server ([#3941](https://github.com/feast-dev/feast/issues/3941)) ([42a7b81](https://github.com/feast-dev/feast/commit/42a7b8170d6dc994055c67989046d11c238af40f)) +* Add Substrait-based ODFV transformation 
([#3969](https://github.com/feast-dev/feast/issues/3969)) ([9e58bd4](https://github.com/feast-dev/feast/commit/9e58bd463f7ca2b4982708cb1e1250f587ecfb68)) +* Add support for arrays in snowflake ([#3769](https://github.com/feast-dev/feast/issues/3769)) ([8d6bec8](https://github.com/feast-dev/feast/commit/8d6bec8fc47986c84f366ce3edfe7d03fa6b2e9f)) +* Added delete_table to redis online store ([#3857](https://github.com/feast-dev/feast/issues/3857)) ([03dae13](https://github.com/feast-dev/feast/commit/03dae13aa60c072b171c7f21d4e795eaaad18e55)) +* Adding support for Native Python feature transformations for ODFVs ([#4045](https://github.com/feast-dev/feast/issues/4045)) ([73bc853](https://github.com/feast-dev/feast/commit/73bc85351a9202d3db93907e8206d68123ee5baa)) +* Bumping requirements ([#4079](https://github.com/feast-dev/feast/issues/4079)) ([1943056](https://github.com/feast-dev/feast/commit/194305631bbb6cca251dbb46df5b5575ffb2391b)) +* Decouple transformation types from ODFVs ([#3949](https://github.com/feast-dev/feast/issues/3949)) ([0a9fae8](https://github.com/feast-dev/feast/commit/0a9fae8fd42e7348365ef902038f3f71f977ef3e)) +* Dropping Python 3.8 from local integration tests and integration tests ([#3994](https://github.com/feast-dev/feast/issues/3994)) ([817995c](https://github.com/feast-dev/feast/commit/817995c12588cc35c53d1ad487efaaf53da287be)) +* Dropping python 3.8 requirements files from the project. 
([#4021](https://github.com/feast-dev/feast/issues/4021)) ([f09c612](https://github.com/feast-dev/feast/commit/f09c612d046dfa56e9c616ff68c05823ce0f3bb6)) +* Dropping the support for python 3.8 version from feast ([#4010](https://github.com/feast-dev/feast/issues/4010)) ([a0f7472](https://github.com/feast-dev/feast/commit/a0f7472f200300f3a45aa404922dd67bb4ad237f)) +* Dropping unit tests for Python 3.8 ([#3989](https://github.com/feast-dev/feast/issues/3989)) ([60f24f9](https://github.com/feast-dev/feast/commit/60f24f9ed16a216acb0f3642892dea73690ca29f)) +* Enable Arrow-based columnar data transfers ([#3996](https://github.com/feast-dev/feast/issues/3996)) ([d8d7567](https://github.com/feast-dev/feast/commit/d8d75676cbaf565b6a6a097f33c49f56b852dcd7)) +* Enable Vector database and retrieve_online_documents API ([#4061](https://github.com/feast-dev/feast/issues/4061)) ([ec19036](https://github.com/feast-dev/feast/commit/ec19036fcc4c77084a2dd5aae5576f8f43393eba)) +* Kubernetes materialization engine written based on bytewax ([#4087](https://github.com/feast-dev/feast/issues/4087)) ([7617bdb](https://github.com/feast-dev/feast/commit/7617bdb7f4222edb69893c37621bd87b940b3227)) +* Lint with ruff ([#4043](https://github.com/feast-dev/feast/issues/4043)) ([7f1557b](https://github.com/feast-dev/feast/commit/7f1557b348b7935e3586c90c8dec15fdf6cd8665)) +* Make arrow primary interchange for offline ODFV execution ([#4083](https://github.com/feast-dev/feast/issues/4083)) ([9ed0a09](https://github.com/feast-dev/feast/commit/9ed0a09746aca0eb73c6e214f082e0e3887ff836)) +* Pandas v2 compatibility ([#3957](https://github.com/feast-dev/feast/issues/3957)) ([64459ad](https://github.com/feast-dev/feast/commit/64459ad1b5ed4a782b7ce87fcec3012e00408c74)) +* Pull duckdb from contribs, add to CI ([#4059](https://github.com/feast-dev/feast/issues/4059)) ([318a2b8](https://github.com/feast-dev/feast/commit/318a2b8bfc94f10c81206071fcb1d41f19683288)) +* Refactor ODFV schema inference 
([#4076](https://github.com/feast-dev/feast/issues/4076)) ([c50a9ff](https://github.com/feast-dev/feast/commit/c50a9ff783fa400542422990ff835da930bcb6bf)) +* Refactor registry caching logic into a separate class ([#3943](https://github.com/feast-dev/feast/issues/3943)) ([924f944](https://github.com/feast-dev/feast/commit/924f9441107b8e36a3d5c6f8b16ed24f9a03b867)) +* Rename OnDemandTransformations to Transformations ([#4038](https://github.com/feast-dev/feast/issues/4038)) ([9b98eaf](https://github.com/feast-dev/feast/commit/9b98eafccbf39b41186bfb3ebd36af20d57bd509)) +* Revert updating dependencies so that feast can be run on 3.11. ([#3968](https://github.com/feast-dev/feast/issues/3968)) ([d3c68fb](https://github.com/feast-dev/feast/commit/d3c68fb8646b29032cb67b8c8e6a8c0aa7a821c7)), closes [#3958](https://github.com/feast-dev/feast/issues/3958) +* Rewrite ibis point-in-time-join w/o feast abstractions ([#4023](https://github.com/feast-dev/feast/issues/4023)) ([3980e0c](https://github.com/feast-dev/feast/commit/3980e0c9a762a6ec3bcee5a0e9cdf532994bb1c9)) +* Support s3gov schema by snowflake offline store during materialization ([#3891](https://github.com/feast-dev/feast/issues/3891)) ([ea8ad17](https://github.com/feast-dev/feast/commit/ea8ad1731a5ebe798b11181fc0027f7cac0e1526)) +* Update odfv test ([#4054](https://github.com/feast-dev/feast/issues/4054)) ([afd52b8](https://github.com/feast-dev/feast/commit/afd52b8803d7660a90f382d2c1ad7705608c861b)) +* Update pyproject.toml to use Python 3.9 as default ([#4011](https://github.com/feast-dev/feast/issues/4011)) ([277b891](https://github.com/feast-dev/feast/commit/277b891ffa1193914b123672010e588573dcaa98)) +* Update the Pydantic from v1 to v2 ([#3948](https://github.com/feast-dev/feast/issues/3948)) ([ec11a7c](https://github.com/feast-dev/feast/commit/ec11a7cb8d56d8e2e5cda07e06b4c98dcc9d2ba3)) +* Updating dependencies so that feast can be run on 3.11. 
([#3958](https://github.com/feast-dev/feast/issues/3958)) ([59639db](https://github.com/feast-dev/feast/commit/59639dbb0272aacd2201cb5f65b01445013db6e6)) +* Updating protos to separate transformation ([#4018](https://github.com/feast-dev/feast/issues/4018)) ([c58ef74](https://github.com/feast-dev/feast/commit/c58ef74c18554d823f7957bf602184c744bb7ed7)) + + +### Reverts + +* Reverting bumping requirements ([#4081](https://github.com/feast-dev/feast/issues/4081)) ([1ba65b4](https://github.com/feast-dev/feast/commit/1ba65b4e13a2af3e9cea879d1c1e48891a0f0610)), closes [#4079](https://github.com/feast-dev/feast/issues/4079) +* Verify the existence of Registry tables in snowflake… ([#3907](https://github.com/feast-dev/feast/issues/3907)) ([c0d358a](https://github.com/feast-dev/feast/commit/c0d358a49d5f576bb9f1017d1ee0db2d6cd5f1a5)), closes [#3851](https://github.com/feast-dev/feast/issues/3851) + # [0.35.0](https://github.com/feast-dev/feast/compare/v0.34.0...v0.35.0) (2024-01-13) diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml index 6111639b68..d7a7e30651 100644 --- a/infra/charts/feast-feature-server/Chart.yaml +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-feature-server description: Feast Feature Server in Go or Python type: application -version: 0.35.0 +version: 0.36.0 keywords: - machine learning - big data diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md index 2467b60e1e..48018b88d5 100644 --- a/infra/charts/feast-feature-server/README.md +++ b/infra/charts/feast-feature-server/README.md @@ -1,6 +1,6 @@ # Feast Python / Go Feature Server Helm Charts -Current chart version is `0.35.0` +Current chart version is `0.36.0` ## Installation @@ -30,7 +30,7 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d | fullnameOverride | string | `""` | | | image.pullPolicy | string | 
`"IfNotPresent"` | | | image.repository | string | `"feastdev/feature-server"` | Docker image for Feature Server repository | -| image.tag | string | `"0.35.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | +| image.tag | string | `"0.36.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | | imagePullSecrets | list | `[]` | | | livenessProbe.initialDelaySeconds | int | `30` | | | livenessProbe.periodSeconds | int | `30` | | diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 2383d6fa9f..6177e746fb 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -9,7 +9,7 @@ image: repository: feastdev/feature-server pullPolicy: IfNotPresent # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) - tag: 0.35.0 + tag: 0.36.0 imagePullSecrets: [] nameOverride: "" diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index c53b73848c..6e146a442b 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.35.0 +version: 0.36.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index d8d4a3d376..241bafdc65 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast Java components that are being installe ## Chart: Feast -Feature store for machine learning Current chart version is `0.35.0` +Feature store for machine learning Current chart version is `0.36.0` ## Installation @@ -65,8 +65,8 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/java-demo) fo | Repository | Name | 
Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.35.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.35.0 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.36.0 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.36.0 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index 122c80f9fd..14da582933 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: feature-server -version: 0.35.0 -appVersion: v0.35.0 +version: 0.36.0 +appVersion: v0.36.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 514fcc5727..8fd847e71b 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.35.0](https://img.shields.io/badge/Version-0.35.0-informational?style=flat-square) ![AppVersion: v0.35.0](https://img.shields.io/badge/AppVersion-v0.35.0-informational?style=flat-square) +![Version: 0.36.0](https://img.shields.io/badge/Version-0.36.0-informational?style=flat-square) ![AppVersion: v0.36.0](https://img.shields.io/badge/AppVersion-v0.36.0-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` 
| Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.35.0"` | Image tag | +| image.tag | string | `"0.36.0"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | @@ -64,4 +64,4 @@ Feast Feature Server: Online feature serving service for Feast | transformationService.port | int | `6566` | | ---------------------------------------------- -Autogenerated from chart metadata using [helm-docs v1.12.0](https://github.com/norwoodj/helm-docs/releases/v1.12.0) +Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1) diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index a14cc2d28f..76bfe2c0cb 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: feastdev/feature-server-java # image.tag -- Image tag - tag: 0.35.0 + tag: 0.36.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 852045f52b..218726b7f1 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.35.0 -appVersion: v0.35.0 +version: 0.36.0 +appVersion: v0.36.0 keywords: - machine learning - big data diff --git 
a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 758620d56a..5d4772572d 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.35.0](https://img.shields.io/badge/Version-0.35.0-informational?style=flat-square) ![AppVersion: v0.35.0](https://img.shields.io/badge/AppVersion-v0.35.0-informational?style=flat-square) +![Version: 0.36.0](https://img.shields.io/badge/Version-0.36.0-informational?style=flat-square) ![AppVersion: v0.36.0](https://img.shields.io/badge/AppVersion-v0.36.0-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.35.0"` | Image tag | +| image.tag | string | `"0.36.0"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | @@ -25,4 +25,4 @@ Transformation service: to compute on-demand features | service.type | string | `"ClusterIP"` | Kubernetes service type | ---------------------------------------------- -Autogenerated from chart metadata using [helm-docs v1.12.0](https://github.com/norwoodj/helm-docs/releases/v1.12.0) +Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1) diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 
e45ef47288..8e9fe2f87a 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.35.0 + tag: 0.36.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index ec098f2f7b..601d9a7a84 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.35.0 + version: 0.36.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.35.0 + version: 0.36.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/java/pom.xml b/java/pom.xml index ccb3312596..b8d82c8e04 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -35,7 +35,7 @@ - 0.35.0 + 0.36.0 https://github.com/feast-dev/feast UTF-8 diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json index f142b0b644..5ba6777007 100644 --- a/sdk/python/feast/ui/package.json +++ b/sdk/python/feast/ui/package.json @@ -6,7 +6,7 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.9.0", - "@feast-dev/feast-ui": "0.35.0", + "@feast-dev/feast-ui": "0.36.0", "@testing-library/jest-dom": "^5.16.4", "@testing-library/react": "^13.2.0", "@testing-library/user-event": "^13.5.0", diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 48cbd30803..0db6b68e24 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1451,10 +1451,10 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" 
-"@feast-dev/feast-ui@0.35.0": - version "0.35.0" - resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.35.0.tgz#f28eb82ae4855673230f14e3740a7786f545fdd7" - integrity sha512-t0Rd2TWUMim6ITfVVlQU8aBZboLvxla6Z7udGW+tQ3UUGcq1VbM6/y+GobuYQfbdHHeF2GmlYqp6zw5DuoIM+Q== +"@feast-dev/feast-ui@0.36.0": + version "0.36.0" + resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.36.0.tgz#b8cdc5f3e959fa2c787e2f1839aaaec99c739d65" + integrity sha512-R+F1cQI89Nrw64GnePENYMRLcSb6DH24zs5aO57ovGbFQquR0vqJ6UfqjhLlIQpkEE6quV2Zr3QOwVB1GBVL1g== dependencies: "@elastic/datemath" "^5.0.3" "@elastic/eui" "^55.0.1" diff --git a/ui/package.json b/ui/package.json index c826737cf6..babf01b8fa 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.35.0", + "version": "0.36.0", "private": false, "files": [ "dist"